KEYCLOAK-4384 Remove Mongo support

Stian Thorgersen 2017-02-06 20:22:43 +01:00
parent 06da1d164d
commit 49ac3587b6
164 changed files with 16 additions and 18214 deletions

View file

@@ -85,15 +85,6 @@
 <groupId>org.keycloak</groupId>
 <artifactId>keycloak-saml-core</artifactId>
 </dependency>
-<!-- mongo -->
-<dependency>
-<groupId>org.keycloak</groupId>
-<artifactId>keycloak-model-mongo</artifactId>
-</dependency>
-<dependency>
-<groupId>org.mongodb</groupId>
-<artifactId>mongo-java-driver</artifactId>
-</dependency>
 <dependency>
 <groupId>org.liquibase</groupId>

View file

@@ -51,12 +51,7 @@ if (result == []) of /profile=$clusteredProfile/subsystem=keycloak-server/:read-
 echo
 end-if
-# Find if we are using jpa or mongo
-if (result == mongo) of /profile=$clusteredProfile/subsystem=keycloak-server/spi=realm/:read-attribute(name=default-provider)
-set persistenceProvider=mongo
-else
-set persistenceProvider=jpa
-end-if
+set persistenceProvider=jpa
 # Migrate from 2.1.0 to 2.2.0
 if (outcome == failed) of /profile=$clusteredProfile/subsystem=infinispan/cache-container=keycloak/distributed-cache=authorization/:read-resource

View file

@@ -63,12 +63,7 @@ if (result == []) of /profile=$standaloneProfile/subsystem=keycloak-server/:read
 echo
 end-if
-# Find if we are using jpa or mongo
-if (result == mongo) of /profile=$standaloneProfile/subsystem=keycloak-server/spi=realm/:read-attribute(name=default-provider)
-set persistenceProvider=mongo
-else
-set persistenceProvider=jpa
-end-if
+set persistenceProvider=jpa
 # Migrate from 2.1.0 to 2.2.0
 if (result == update) of /profile=$standaloneProfile/subsystem=keycloak-server/spi=connectionsJpa/provider=default/:map-get(name=properties,key=databaseSchema)

View file

@@ -42,12 +42,7 @@ if (result == []) of /subsystem=keycloak-server/:read-children-names(child-type=
 echo
 end-if
-# Find if we are using jpa or mongo
-if (result == mongo) of /subsystem=keycloak-server/spi=realm/:read-attribute(name=default-provider)
-set persistenceProvider=mongo
-else
-set persistenceProvider=jpa
-end-if
+set persistenceProvider=jpa
 # Migrate from 2.1.0 to 2.2.0
 if (outcome == failed) of /extension=org.jboss.as.deployment-scanner/:read-resource

View file

@@ -55,12 +55,7 @@ if (result == []) of /subsystem=keycloak-server/:read-children-names(child-type=
 echo
 end-if
-# Find if we are using jpa or mongo
-if (result == mongo) of /subsystem=keycloak-server/spi=realm/:read-attribute(name=default-provider)
-set persistenceProvider=mongo
-else
-set persistenceProvider=jpa
-end-if
+set persistenceProvider=jpa
 # Migrate from 2.1.0 to 2.2.0
 if (outcome == failed) of /extension=org.jboss.as.deployment-scanner/:read-resource

View file

@@ -1,41 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Copyright 2016 Red Hat, Inc. and/or its affiliates
~ and other contributors as indicated by the @author tags.
~
~ Licensed under the Apache License, Version 2.0 (the "License");
~ you may not use this file except in compliance with the License.
~ You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<module xmlns="urn:jboss:module:1.3" name="org.keycloak.keycloak-model-mongo">
<properties>
<property name="jboss.api" value="private"/>
</properties>
<resources>
<artifact name="${org.keycloak:keycloak-model-mongo}"/>
</resources>
<dependencies>
<module name="org.keycloak.keycloak-common"/>
<module name="org.keycloak.keycloak-core"/>
<module name="org.keycloak.keycloak-services"/>
<module name="org.keycloak.keycloak-server-spi"/>
<module name="org.keycloak.keycloak-server-spi-private"/>
<module name="org.mongodb.mongo-java-driver"/>
<module name="org.jboss.logging"/>
<module name="javax.api"/>
<module name="com.fasterxml.jackson.core.jackson-core"/>
<module name="com.fasterxml.jackson.core.jackson-annotations"/>
<module name="com.fasterxml.jackson.core.jackson-databind"/>
<module name="com.fasterxml.jackson.jaxrs.jackson-jaxrs-json-provider"/>
</dependencies>
</module>

View file

@@ -34,7 +34,6 @@
 <module name="org.keycloak.keycloak-server-spi" services="import"/>
 <module name="org.keycloak.keycloak-server-spi-private" services="import"/>
 <module name="org.keycloak.keycloak-model-jpa" services="import"/>
-<module name="org.keycloak.keycloak-model-mongo" services="import"/>
 <module name="org.keycloak.keycloak-model-infinispan" services="import"/>
 <module name="org.keycloak.keycloak-saml-core-public" services="import"/>
 <module name="org.keycloak.keycloak-saml-core" services="import"/>

View file

@@ -1,30 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Copyright 2016 Red Hat, Inc. and/or its affiliates
~ and other contributors as indicated by the @author tags.
~
~ Licensed under the Apache License, Version 2.0 (the "License");
~ you may not use this file except in compliance with the License.
~ You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<module xmlns="urn:jboss:module:1.3" name="org.mongodb.mongo-java-driver">
<properties>
<property name="jboss.api" value="private"/>
</properties>
<resources>
<artifact name="${org.mongodb:mongo-java-driver}"/>
</resources>
<dependencies>
<module name="javax.api"/>
</dependencies>
</module>

View file

@@ -1,16 +1,6 @@
 Test with various databases
 ===========================
-MongoDB
--------
-The Keycloak testsuite uses an embedded MongoDB when running tests so you don't have to have one running locally.
-Run tests:
-mvn install -Pmongo
 MySQL
 -----

View file

@@ -45,18 +45,6 @@ For example to use the example themes run the server with:
 **NOTE:** If `keycloak.theme.dir` is specified the default themes (base, rcue and keycloak) are loaded from the classpath
-### Run server with Mongo model
-To start a Keycloak server with identity model data persisted in Mongo database instead of default JPA/H2 you can run:
-mvn exec:java -Pkeycloak-server -Dkeycloak.realm.provider=mongo -Dkeycloak.user.provider=mongo -Dkeycloak.audit.provider=mongo
-By default it's using database `keycloak` on localhost/27017 and it uses already existing data from this DB (no cleanup of existing data during bootstrap). Assumption is that you already have DB running on localhost/27017 . Use system properties to configure things differently:
-mvn exec:java -Pkeycloak-server -Dkeycloak.realm.provider=mongo -Dkeycloak.user.provider=mongo -Dkeycloak.eventStore.provider=mongo -Dkeycloak.connectionsMongo.host=localhost -Dkeycloak.connectionsMongo.port=27017 -Dkeycloak.connectionsMongo.db=keycloak -Dkeycloak.connectionsMongo.clearOnStartup=false
-Note that if you are using Mongo model, it would mean that Mongo will be used for audit as well. You may need to use audit related properties for configuration of Mongo if you want to override default ones (For example keycloak.audit.mongo.host, keycloak.audit.mongo.port etc)
 TOTP codes
 ----------

View file

@@ -2,7 +2,7 @@ Updating Database Schema
 ========================
 Keycloak supports automatically migrating the database to a new version. This is done by applying one or more change-sets
-to the existing database. This means if you need to do any changes to database schemas for JPA or Mongo you need to create
+to the existing database. This means if you need to do any changes to database schemas you need to create
 a change-set that can transform the schema as well as any existing data.
 This includes changes to:
@@ -13,7 +13,7 @@ This includes changes to:
 * Event entities
-Creating a JPA change-set
+Creating a change-set
 -------------------------
 We use Liquibase to support updating the database. The change-sets are located in
@@ -57,20 +57,6 @@ Once the server has started fully, stop it and run:
 mvn -f testsuite/integration exec:java -Pkeycloak-server -Dkeycloak.connectionsJpa.url='jdbc:h2:keycloak' -Dkeycloak.connectionsJpa.databaseSchema='development-validate'
-Creating a Mongo change-set
----------------------------
-As Mongo is schema-less it's significantly easier to create a change-set. You only need to create/delete collections as
-needed, as well as update any indexes. You will also need to update existing data if required.
-Mongo change-sets are written in Java and are located in the `connections/mongo` module, to add a new change-set create
-a new class that implements `org.keycloak.connections.mongo.updater.updates.Update` the name of the class should be
-`Update<version>` with `.` replaced with `_`.
-You also need to add a reference to this file in `org.keycloak.connections.mongo.updater.DefaultMongoUpdaterProvider`.
-It should be added last to the `DefaultMongoUpdaterProvider#updates` array.
 Testing database migration
 --------------------------
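
The block removed above documented the old Mongo change-set convention: a Java class named `Update<version>` (dots replaced by underscores), registered last in `DefaultMongoUpdaterProvider#updates`. For context only, here is a minimal sketch of that convention; the `Update` interface below is a simplified, hypothetical stand-in for the real abstract class that lived in the now-removed `connections/mongo` module, and the version number is just an example:

````java
import com.mongodb.BasicDBObject;
import com.mongodb.DB;

// Hypothetical, simplified stand-in for the removed
// org.keycloak.connections.mongo.updater.updates.Update contract.
interface Update {
    String getId();      // target version this change-set migrates to
    void update(DB db);  // apply collection/index/data changes
}

// Naming convention from the removed docs: "Update" + version, with '.' replaced by '_'.
// Such a class would have been appended last to DefaultMongoUpdaterProvider#updates.
public class Update2_5_0 implements Update {

    @Override
    public String getId() {
        return "2.5.0";
    }

    @Override
    public void update(DB db) {
        // Mongo is schema-less, so a change-set only creates/drops collections,
        // maintains indexes and migrates existing documents as needed.
        db.getCollection("obsoleteCollection").drop();
        db.getCollection("realms").createIndex(new BasicDBObject("name", 1));
    }
}
````

After this commit, only the Liquibase-based JPA change-sets described in the retained text remain.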

View file

@@ -20,23 +20,22 @@ The changes you will likely make are when you need to add a new SPI, change an e
 All elements in an SPI declaration are optional, but a full SPI declaration
 looks like this:
 ````xml
-<spi name="dblock">
-<default-provider>mongo</default-provider>
-<provider name="jpa" enabled="true">
+<spi name="example">
+<default-provider>myprovider</default-provider>
+<provider name="myprovider" enabled="true">
 <properties>
-<property name="lockWaitTimeout" value="800"/>
+<property name="key" value="value"/>
 </properties>
 </provider>
-<provider name="mongo" enabled="true">
+<provider name="myotherprovider" enabled="true">
 <properties>
-<property name="lockRecheckTime" value="2"/>
-<property name="lockWaitTimeout" value="600"/>
+<property name="key" value="value2"/>
 </properties>
 </provider>
 </spi>
 ````
-Here we have two providers defined for the SPI `dblock`. The
-`default-provider` is listed as `mongo`. However it is up to the SPI to decide how it will
+Here we have two providers defined for the SPI `example`. The
+`default-provider` is listed as `myprovider`. However it is up to the SPI to decide how it will
 treat this setting. Some SPIs allow more than one provider and some do not. So
 `default-provider` can help the SPI to choose.
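
For orientation, the `<property>` values in a `<provider>` element above are handed to the corresponding provider factory's `init(Config.Scope)` callback, the same hook visible in the deleted `MongoAuthorizationStoreFactory` further down in this diff. A minimal, hypothetical sketch using the `example`/`myprovider` names from the snippet; it is illustrative only and not code from this commit:

````java
import org.keycloak.Config;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.KeycloakSessionFactory;
import org.keycloak.provider.Provider;
import org.keycloak.provider.ProviderFactory;

// Hypothetical provider for the "example" SPI shown above.
class MyProvider implements Provider {
    final String key;
    MyProvider(String key) { this.key = key; }
    @Override public void close() { }
}

public class MyProviderFactory implements ProviderFactory<MyProvider> {

    private String key;

    @Override
    public void init(Config.Scope config) {
        // Receives <property name="key" value="value"/> from the <provider> element.
        key = config.get("key", "value");
    }

    @Override
    public MyProvider create(KeycloakSession session) {
        return new MyProvider(key);
    }

    @Override
    public void postInit(KeycloakSessionFactory factory) { }

    @Override
    public void close() { }

    @Override
    public String getId() {
        // Must match the provider name used in the configuration ("myprovider").
        return "myprovider";
    }
}
````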

View file

@@ -1,75 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Copyright 2016 Red Hat, Inc. and/or its affiliates
~ and other contributors as indicated by the @author tags.
~
~ Licensed under the Apache License, Version 2.0 (the "License");
~ you may not use this file except in compliance with the License.
~ You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>keycloak-parent</artifactId>
<groupId>org.keycloak</groupId>
<version>3.0.0.CR1-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>keycloak-model-mongo</artifactId>
<name>Keycloak Model Mongo</name>
<description/>
<properties>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
</properties>
<dependencies>
<dependency>
<groupId>org.bouncycastle</groupId>
<artifactId>bcprov-jdk15on</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.keycloak</groupId>
<artifactId>keycloak-core</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.keycloak</groupId>
<artifactId>keycloak-server-spi</artifactId>
</dependency>
<dependency>
<groupId>org.keycloak</groupId>
<artifactId>keycloak-server-spi-private</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.keycloak</groupId>
<artifactId>keycloak-services</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.jboss.logging</groupId>
<artifactId>jboss-logging</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.mongodb</groupId>
<artifactId>mongo-java-driver</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>
</project>

View file

@@ -1,180 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.authorization.mongo.adapter;
import org.keycloak.authorization.AuthorizationProvider;
import org.keycloak.authorization.model.Policy;
import org.keycloak.authorization.model.Resource;
import org.keycloak.authorization.model.ResourceServer;
import org.keycloak.authorization.model.Scope;
import org.keycloak.authorization.mongo.entities.PolicyEntity;
import org.keycloak.connections.mongo.api.context.MongoStoreInvocationContext;
import org.keycloak.models.mongo.keycloak.adapters.AbstractMongoAdapter;
import org.keycloak.representations.idm.authorization.DecisionStrategy;
import org.keycloak.representations.idm.authorization.Logic;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
* @author <a href="mailto:psilva@redhat.com">Pedro Igor</a>
*/
public class PolicyAdapter extends AbstractMongoAdapter<PolicyEntity> implements Policy {
private final PolicyEntity entity;
private final AuthorizationProvider authorizationProvider;
public PolicyAdapter(PolicyEntity entity, MongoStoreInvocationContext invocationContext, AuthorizationProvider authorizationProvider) {
super(invocationContext);
this.entity = entity;
this.authorizationProvider = authorizationProvider;
}
@Override
protected PolicyEntity getMongoEntity() {
return entity;
}
@Override
public String getId() {
return getMongoEntity().getId();
}
@Override
public String getType() {
return getMongoEntity().getType();
}
@Override
public DecisionStrategy getDecisionStrategy() {
return getMongoEntity().getDecisionStrategy();
}
@Override
public void setDecisionStrategy(DecisionStrategy decisionStrategy) {
getMongoEntity().setDecisionStrategy(decisionStrategy);
updateMongoEntity();
}
@Override
public Logic getLogic() {
return getMongoEntity().getLogic();
}
@Override
public void setLogic(Logic logic) {
getMongoEntity().setLogic(logic);
updateMongoEntity();
}
@Override
public Map<String, String> getConfig() {
return getMongoEntity().getConfig();
}
@Override
public void setConfig(Map<String, String> config) {
getMongoEntity().setConfig(config);
updateMongoEntity();
}
@Override
public String getName() {
return getMongoEntity().getName();
}
@Override
public void setName(String name) {
getMongoEntity().setName(name);
updateMongoEntity();
}
@Override
public String getDescription() {
return getMongoEntity().getDescription();
}
@Override
public void setDescription(String description) {
getMongoEntity().setDescription(description);
updateMongoEntity();
}
@Override
public ResourceServer getResourceServer() {
return this.authorizationProvider.getStoreFactory().getResourceServerStore().findById(getMongoEntity().getResourceServerId());
}
@Override
public Set<Policy> getAssociatedPolicies() {
return getMongoEntity().getAssociatedPolicies().stream()
.map((Function<String, Policy>) id -> authorizationProvider.getStoreFactory().getPolicyStore().findById(id, getMongoEntity().getResourceServerId()))
.collect(Collectors.toSet());
}
@Override
public Set<Resource> getResources() {
return getMongoEntity().getResources().stream()
.map((Function<String, Resource>) id -> authorizationProvider.getStoreFactory().getResourceStore().findById(id, getMongoEntity().getResourceServerId()))
.collect(Collectors.toSet());
}
@Override
public Set<Scope> getScopes() {
return getMongoEntity().getScopes().stream()
.map((Function<String, Scope>) id -> authorizationProvider.getStoreFactory().getScopeStore().findById(id, getMongoEntity().getResourceServerId()))
.collect(Collectors.toSet());
}
@Override
public void addScope(Scope scope) {
getMongoEntity().addScope(scope.getId());
updateMongoEntity();
}
@Override
public void removeScope(Scope scope) {
getMongoEntity().removeScope(scope.getId());
updateMongoEntity();
}
@Override
public void addAssociatedPolicy(Policy associatedPolicy) {
getMongoEntity().addAssociatedPolicy(associatedPolicy.getId());
updateMongoEntity();
}
@Override
public void removeAssociatedPolicy(Policy associatedPolicy) {
getMongoEntity().removeAssociatedPolicy(associatedPolicy.getId());
updateMongoEntity();
}
@Override
public void addResource(Resource resource) {
getMongoEntity().addResource(resource.getId());
updateMongoEntity();
}
@Override
public void removeResource(Resource resource) {
getMongoEntity().removeResource(resource.getId());
updateMongoEntity();
}
}

View file

@@ -1,106 +0,0 @@
package org.keycloak.authorization.mongo.adapter;
import org.keycloak.authorization.AuthorizationProvider;
import org.keycloak.authorization.model.Resource;
import org.keycloak.authorization.model.ResourceServer;
import org.keycloak.authorization.model.Scope;
import org.keycloak.authorization.mongo.entities.ResourceEntity;
import org.keycloak.connections.mongo.api.context.MongoStoreInvocationContext;
import org.keycloak.models.mongo.keycloak.adapters.AbstractMongoAdapter;
import java.util.List;
import java.util.Set;
import static java.util.stream.Collectors.toList;
/**
* @author <a href="mailto:psilva@redhat.com">Pedro Igor</a>
*/
public class ResourceAdapter extends AbstractMongoAdapter<ResourceEntity> implements Resource {
private final ResourceEntity entity;
private final AuthorizationProvider authorizationProvider;
public ResourceAdapter(ResourceEntity entity, MongoStoreInvocationContext invocationContext, AuthorizationProvider authorizationProvider) {
super(invocationContext);
this.entity = entity;
this.authorizationProvider = authorizationProvider;
}
@Override
public String getId() {
return getMongoEntity().getId();
}
@Override
public String getName() {
return getMongoEntity().getName();
}
@Override
public void setName(String name) {
getMongoEntity().setName(name);
updateMongoEntity();
}
@Override
public String getUri() {
return getMongoEntity().getUri();
}
@Override
public void setUri(String uri) {
getMongoEntity().setUri(uri);
updateMongoEntity();
}
@Override
public String getType() {
return getMongoEntity().getType();
}
@Override
public void setType(String type) {
getMongoEntity().setType(type);
updateMongoEntity();
}
@Override
public List<Scope> getScopes() {
return getMongoEntity().getScopes().stream()
.map(id -> authorizationProvider.getStoreFactory().getScopeStore().findById(id, getResourceServer().getId()))
.collect(toList());
}
@Override
public String getIconUri() {
return getMongoEntity().getIconUri();
}
@Override
public void setIconUri(String iconUri) {
getMongoEntity().setIconUri(iconUri);
updateMongoEntity();
}
@Override
public ResourceServer getResourceServer() {
return this.authorizationProvider.getStoreFactory().getResourceServerStore().findById(getMongoEntity().getResourceServerId());
}
@Override
public String getOwner() {
return getMongoEntity().getOwner();
}
@Override
public void updateScopes(Set<Scope> scopes) {
getMongoEntity().updateScopes(scopes);
updateMongoEntity();
}
@Override
protected ResourceEntity getMongoEntity() {
return this.entity;
}
}

View file

@@ -1,73 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.authorization.mongo.adapter;
import org.keycloak.authorization.model.ResourceServer;
import org.keycloak.authorization.mongo.entities.ResourceServerEntity;
import org.keycloak.connections.mongo.api.context.MongoStoreInvocationContext;
import org.keycloak.models.mongo.keycloak.adapters.AbstractMongoAdapter;
import org.keycloak.representations.idm.authorization.PolicyEnforcementMode;
/**
* @author <a href="mailto:psilva@redhat.com">Pedro Igor</a>
*/
public class ResourceServerAdapter extends AbstractMongoAdapter<ResourceServerEntity> implements ResourceServer{
private final ResourceServerEntity entity;
public ResourceServerAdapter(ResourceServerEntity entity, MongoStoreInvocationContext invocationContext) {
super(invocationContext);
this.entity = entity;
}
@Override
public String getId() {
return getMongoEntity().getId();
}
@Override
public String getClientId() {
return getMongoEntity().getClientId();
}
@Override
public boolean isAllowRemoteResourceManagement() {
return getMongoEntity().isAllowRemoteResourceManagement();
}
@Override
public void setAllowRemoteResourceManagement(boolean allowRemoteResourceManagement) {
getMongoEntity().setAllowRemoteResourceManagement(allowRemoteResourceManagement);
updateMongoEntity();
}
@Override
public PolicyEnforcementMode getPolicyEnforcementMode() {
return getMongoEntity().getPolicyEnforcementMode();
}
@Override
public void setPolicyEnforcementMode(PolicyEnforcementMode enforcementMode) {
getMongoEntity().setPolicyEnforcementMode(enforcementMode);
updateMongoEntity();
}
@Override
protected ResourceServerEntity getMongoEntity() {
return this.entity;
}
}

View file

@@ -1,60 +0,0 @@
package org.keycloak.authorization.mongo.adapter;
import org.keycloak.authorization.AuthorizationProvider;
import org.keycloak.authorization.model.ResourceServer;
import org.keycloak.authorization.model.Scope;
import org.keycloak.authorization.mongo.entities.ScopeEntity;
import org.keycloak.connections.mongo.api.context.MongoStoreInvocationContext;
import org.keycloak.models.mongo.keycloak.adapters.AbstractMongoAdapter;
/**
* @author <a href="mailto:psilva@redhat.com">Pedro Igor</a>
*/
public class ScopeAdapter extends AbstractMongoAdapter<ScopeEntity> implements Scope {
private final ScopeEntity entity;
private final AuthorizationProvider authorizationProvider;
public ScopeAdapter(ScopeEntity entity, MongoStoreInvocationContext invocationContext, AuthorizationProvider authorizationProvider) {
super(invocationContext);
this.entity = entity;
this.authorizationProvider = authorizationProvider;
}
@Override
public String getId() {
return getMongoEntity().getId();
}
@Override
public String getName() {
return getMongoEntity().getName();
}
@Override
public void setName(String name) {
getMongoEntity().setName(name);
updateMongoEntity();
}
@Override
public String getIconUri() {
return getMongoEntity().getIconUri();
}
@Override
public void setIconUri(String iconUri) {
getMongoEntity().setIconUri(iconUri);
updateMongoEntity();
}
@Override
public ResourceServer getResourceServer() {
return this.authorizationProvider.getStoreFactory().getResourceServerStore().findById(getMongoEntity().getResourceServerId());
}
@Override
protected ScopeEntity getMongoEntity() {
return this.entity;
}
}

View file

@@ -1,166 +0,0 @@
/*
* JBoss, Home of Professional Open Source.
* Copyright 2016 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.authorization.mongo.entities;
import org.keycloak.connections.mongo.api.MongoCollection;
import org.keycloak.connections.mongo.api.MongoIdentifiableEntity;
import org.keycloak.connections.mongo.api.context.MongoStoreInvocationContext;
import org.keycloak.models.mongo.keycloak.entities.AbstractIdentifiableEntity;
import org.keycloak.representations.idm.authorization.DecisionStrategy;
import org.keycloak.representations.idm.authorization.Logic;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
* @author <a href="mailto:psilva@redhat.com">Pedro Igor</a>
*/
@MongoCollection(collectionName = "policies")
public class PolicyEntity extends AbstractIdentifiableEntity implements MongoIdentifiableEntity {
private String name;
private String description;
private String type;
private DecisionStrategy decisionStrategy = DecisionStrategy.UNANIMOUS;
private Logic logic = Logic.POSITIVE;
private Map<String, String> config = new HashMap();
private String resourceServerId;
private Set<String> associatedPolicies = new HashSet<>();
private Set<String> resources = new HashSet<>();
private Set<String> scopes = new HashSet<>();
public String getType() {
return this.type;
}
public void setType(String type) {
this.type = type;
}
public DecisionStrategy getDecisionStrategy() {
return this.decisionStrategy;
}
public void setDecisionStrategy(DecisionStrategy decisionStrategy) {
this.decisionStrategy = decisionStrategy;
}
public Logic getLogic() {
return this.logic;
}
public void setLogic(Logic logic) {
this.logic = logic;
}
public Map<String, String> getConfig() {
return this.config;
}
public void setConfig(Map<String, String> config) {
this.config = config;
}
public String getName() {
return this.name;
}
public void setName(String name) {
this.name = name;
}
public String getDescription() {
return this.description;
}
public void setDescription(String description) {
this.description = description;
}
public String getResourceServerId() {
return this.resourceServerId;
}
public void setResourceServerId(String resourceServerId) {
this.resourceServerId = resourceServerId;
}
public Set<String> getAssociatedPolicies() {
return this.associatedPolicies;
}
public void setAssociatedPolicies(Set<String> associatedPolicies) {
this.associatedPolicies = associatedPolicies;
}
public Set<String> getResources() {
return this.resources;
}
public void setResources(Set<String> resources) {
this.resources = resources;
}
public Set<String> getScopes() {
return this.scopes;
}
public void setScopes(Set<String> scopes) {
this.scopes = scopes;
}
public void addScope(String scopeId) {
getScopes().add(scopeId);
}
public void removeScope(String scopeId) {
getScopes().remove(scopeId);
}
public void addAssociatedPolicy(String policyId) {
getAssociatedPolicies().add(policyId);
}
public void removeAssociatedPolicy(String policyId) {
getAssociatedPolicies().remove(policyId);
}
public void addResource(String resourceId) {
getResources().add(resourceId);
}
public void removeResource(String resourceId) {
getResources().remove(resourceId);
}
public void afterRemove(MongoStoreInvocationContext invocationContext) {
}
}

View file

@@ -1,142 +0,0 @@
/*
* JBoss, Home of Professional Open Source.
* Copyright 2016 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.authorization.mongo.entities;
import org.keycloak.authorization.model.Scope;
import org.keycloak.connections.mongo.api.MongoCollection;
import org.keycloak.connections.mongo.api.MongoIdentifiableEntity;
import org.keycloak.connections.mongo.api.context.MongoStoreInvocationContext;
import org.keycloak.models.mongo.keycloak.entities.AbstractIdentifiableEntity;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* @author <a href="mailto:psilva@redhat.com">Pedro Igor</a>
*/
@MongoCollection(collectionName = "resources")
public class ResourceEntity extends AbstractIdentifiableEntity implements MongoIdentifiableEntity {
private String name;
private String uri;
private String type;
private String iconUri;
private String owner;
private String resourceServerId;
private List<String> scopes = new ArrayList<>();
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getUri() {
return uri;
}
public void setUri(String uri) {
this.uri = uri;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public List<String> getScopes() {
return this.scopes;
}
public void setScopes(List<String> scopes) {
this.scopes = scopes;
}
public String getIconUri() {
return iconUri;
}
public void setIconUri(String iconUri) {
this.iconUri = iconUri;
}
public String getResourceServerId() {
return resourceServerId;
}
public void setResourceServerId(String resourceServerId) {
this.resourceServerId = resourceServerId;
}
public String getOwner() {
return this.owner;
}
public void setOwner(String owner) {
this.owner = owner;
}
public void updateScopes(Set<Scope> toUpdate) {
for (Scope scope : toUpdate) {
boolean hasScope = false;
for (String existingScope : this.scopes) {
if (existingScope.equals(scope.getId())) {
hasScope = true;
}
}
if (!hasScope) {
this.scopes.add(scope.getId());
}
}
for (String scopeId : new HashSet<String>(this.scopes)) {
boolean hasScope = false;
for (Scope scope : toUpdate) {
if (scopeId.equals(scope.getId())) {
hasScope = true;
}
}
if (!hasScope) {
this.scopes.remove(scopeId);
}
}
}
@Override
public void afterRemove(MongoStoreInvocationContext invocationContext) {
}
}

View file

@@ -1,67 +0,0 @@
/*
* JBoss, Home of Professional Open Source.
* Copyright 2016 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.authorization.mongo.entities;
import org.keycloak.connections.mongo.api.MongoCollection;
import org.keycloak.connections.mongo.api.MongoIdentifiableEntity;
import org.keycloak.connections.mongo.api.context.MongoStoreInvocationContext;
import org.keycloak.models.mongo.keycloak.entities.AbstractIdentifiableEntity;
import org.keycloak.representations.idm.authorization.PolicyEnforcementMode;
/**
* @author <a href="mailto:psilva@redhat.com">Pedro Igor</a>
*/
@MongoCollection(collectionName = "resource-servers")
public class ResourceServerEntity extends AbstractIdentifiableEntity implements MongoIdentifiableEntity {
private String clientId;
private boolean allowRemoteResourceManagement;
private PolicyEnforcementMode policyEnforcementMode = PolicyEnforcementMode.ENFORCING;
public String getClientId() {
return this.clientId;
}
public void setClientId(String clientId) {
this.clientId = clientId;
}
public boolean isAllowRemoteResourceManagement() {
return this.allowRemoteResourceManagement;
}
public void setAllowRemoteResourceManagement(boolean allowRemoteResourceManagement) {
this.allowRemoteResourceManagement = allowRemoteResourceManagement;
}
public PolicyEnforcementMode getPolicyEnforcementMode() {
return this.policyEnforcementMode;
}
public void setPolicyEnforcementMode(PolicyEnforcementMode policyEnforcementMode) {
this.policyEnforcementMode = policyEnforcementMode;
}
@Override
public void afterRemove(MongoStoreInvocationContext invocationContext) {
}
}

View file

@@ -1,66 +0,0 @@
/*
* JBoss, Home of Professional Open Source.
* Copyright 2016 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.authorization.mongo.entities;
import org.keycloak.connections.mongo.api.MongoCollection;
import org.keycloak.connections.mongo.api.MongoIdentifiableEntity;
import org.keycloak.connections.mongo.api.context.MongoStoreInvocationContext;
import org.keycloak.models.mongo.keycloak.entities.AbstractIdentifiableEntity;
/**
* @author <a href="mailto:psilva@redhat.com">Pedro Igor</a>
*/
@MongoCollection(collectionName = "scopes")
public class ScopeEntity extends AbstractIdentifiableEntity implements MongoIdentifiableEntity {
private String name;
private String iconUri;
private String resourceServerId;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getIconUri() {
return iconUri;
}
public void setIconUri(String iconUri) {
this.iconUri = iconUri;
}
public String getResourceServerId() {
return resourceServerId;
}
public void setResourceServerId(String resourceServerId) {
this.resourceServerId = resourceServerId;
}
@Override
public void afterRemove(MongoStoreInvocationContext invocationContext) {
}
}

View file

@@ -1,52 +0,0 @@
/*
* JBoss, Home of Professional Open Source.
* Copyright 2016 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.authorization.mongo.store;
import org.keycloak.Config;
import org.keycloak.authorization.AuthorizationProvider;
import org.keycloak.authorization.store.AuthorizationStoreFactory;
import org.keycloak.authorization.store.StoreFactory;
import org.keycloak.connections.mongo.MongoConnectionProvider;
import org.keycloak.models.KeycloakSession;
/**
* @author <a href="mailto:psilva@redhat.com">Pedro Igor</a>
*/
public class MongoAuthorizationStoreFactory implements AuthorizationStoreFactory {
@Override
public StoreFactory create(KeycloakSession session) {
MongoConnectionProvider connection = session.getProvider(MongoConnectionProvider.class);
return new MongoStoreFactory(connection.getInvocationContext(), session);
}
@Override
public void init(Config.Scope config) {
}
@Override
public void close() {
}
@Override
public String getId() {
return "mongo";
}
}

View file

@@ -1,201 +0,0 @@
/*
* JBoss, Home of Professional Open Source.
* Copyright 2016 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.authorization.mongo.store;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;
import com.mongodb.QueryBuilder;
import org.keycloak.authorization.AuthorizationProvider;
import org.keycloak.authorization.model.Policy;
import org.keycloak.authorization.model.ResourceServer;
import org.keycloak.authorization.mongo.adapter.PolicyAdapter;
import org.keycloak.authorization.mongo.entities.PolicyEntity;
import org.keycloak.authorization.store.PolicyStore;
import org.keycloak.connections.mongo.api.MongoStore;
import org.keycloak.connections.mongo.api.context.MongoStoreInvocationContext;
import org.keycloak.models.utils.KeycloakModelUtils;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
import static java.util.stream.Collectors.toList;
/**
* @author <a href="mailto:psilva@redhat.com">Pedro Igor</a>
*/
public class MongoPolicyStore implements PolicyStore {
private final MongoStoreInvocationContext invocationContext;
private final AuthorizationProvider authorizationProvider;
public MongoPolicyStore(MongoStoreInvocationContext invocationContext, AuthorizationProvider authorizationProvider) {
this.invocationContext = invocationContext;
this.authorizationProvider = authorizationProvider;
}
@Override
public Policy create(String name, String type, ResourceServer resourceServer) {
PolicyEntity entity = new PolicyEntity();
entity.setId(KeycloakModelUtils.generateId());
entity.setName(name);
entity.setType(type);
entity.setResourceServerId(resourceServer.getId());
getMongoStore().insertEntity(entity, getInvocationContext());
return new PolicyAdapter(entity, getInvocationContext(), this.authorizationProvider) ;
}
@Override
public void delete(String id) {
getMongoStore().removeEntity(PolicyEntity.class, id, getInvocationContext());
}
@Override
public Policy findById(String id, String resourceServerId) {
PolicyEntity entity = getMongoStore().loadEntity(PolicyEntity.class, id, getInvocationContext());
if (entity == null) {
return null;
}
return new PolicyAdapter(entity, getInvocationContext(), this.authorizationProvider);
}
@Override
public Policy findByName(String name, String resourceServerId) {
DBObject query = new QueryBuilder()
.and("resourceServerId").is(resourceServerId)
.and("name").is(name)
.get();
return getMongoStore().loadEntities(PolicyEntity.class, query, getInvocationContext()).stream()
.map(policyEntity -> findById(policyEntity.getId(), resourceServerId)).findFirst().orElse(null);
}
@Override
public List<Policy> findByResourceServer(String resourceServerId) {
DBObject query = new QueryBuilder()
.and("resourceServerId").is(resourceServerId)
.get();
return getMongoStore().loadEntities(PolicyEntity.class, query, getInvocationContext()).stream()
.map(policyEntity -> findById(policyEntity.getId(), resourceServerId))
.collect(toList());
}
@Override
public List<Policy> findByResourceServer(Map<String, String[]> attributes, String resourceServerId, int firstResult, int maxResult) {
QueryBuilder queryBuilder = new QueryBuilder()
.and("resourceServerId").is(resourceServerId);
attributes.forEach((name, value) -> {
if ("permission".equals(name)) {
if (Boolean.valueOf(value[0])) {
queryBuilder.and("type").in(new String[] {"resource", "scope"});
} else {
queryBuilder.and("type").notIn(new String[] {"resource", "scope"});
}
} else if ("id".equals(name)) {
queryBuilder.and("_id").in(value);
} else {
queryBuilder.and(name).regex(Pattern.compile(".*" + value[0] + ".*", Pattern.CASE_INSENSITIVE));
}
});
DBObject sort = new BasicDBObject("name", 1);
return getMongoStore().loadEntities(PolicyEntity.class, queryBuilder.get(), sort, firstResult, maxResult, invocationContext).stream()
.map(policy -> findById(policy.getId(), resourceServerId)).collect(toList());
}
@Override
public List<Policy> findByResource(String resourceId, String resourceServerId) {
DBObject query = new QueryBuilder()
.and("resourceServerId").is(resourceServerId)
.and("resources").is(resourceId)
.get();
return getMongoStore().loadEntities(PolicyEntity.class, query, getInvocationContext()).stream()
.map(policyEntity -> findById(policyEntity.getId(), resourceServerId))
.collect(toList());
}
@Override
public List<Policy> findByResourceType(String resourceType, String resourceServerId) {
DBObject query = new QueryBuilder()
.and("resourceServerId").is(resourceServerId)
.get();
return getMongoStore().loadEntities(PolicyEntity.class, query, getInvocationContext()).stream()
.filter(policyEntity -> {
String defaultResourceType = policyEntity.getConfig().get("defaultResourceType");
return defaultResourceType != null && defaultResourceType.equals(resourceType);
})
.map(policyEntity -> findById(policyEntity.getId(), resourceServerId))
.collect(toList());
}
@Override
public List<Policy> findByScopeIds(List<String> scopeIds, String resourceServerId) {
DBObject query = new QueryBuilder()
.and("resourceServerId").is(resourceServerId)
.and("scopes").in(scopeIds)
.get();
return getMongoStore().loadEntities(PolicyEntity.class, query, getInvocationContext()).stream()
.map(policyEntity -> findById(policyEntity.getId(), resourceServerId))
.collect(toList());
}
@Override
public List<Policy> findByType(String type, String resourceServerId) {
DBObject query = new QueryBuilder()
.and("resourceServerId").is(resourceServerId)
.and("type").is(type)
.get();
return getMongoStore().loadEntities(PolicyEntity.class, query, getInvocationContext()).stream()
.map(policyEntity -> findById(policyEntity.getId(), resourceServerId))
.collect(toList());
}
@Override
public List<Policy> findDependentPolicies(String policyId, String resourceServerId) {
DBObject query = new QueryBuilder()
.and("resourceServerId").is(resourceServerId)
.and("associatedPolicies").is(policyId)
.get();
return getMongoStore().loadEntities(PolicyEntity.class, query, getInvocationContext()).stream()
.map(policyEntity -> findById(policyEntity.getId(), resourceServerId))
.collect(toList());
}
private MongoStoreInvocationContext getInvocationContext() {
return this.invocationContext;
}
private MongoStore getMongoStore() {
return getInvocationContext().getMongoStore();
}
}

View file

@@ -1,90 +0,0 @@
/*
* JBoss, Home of Professional Open Source.
* Copyright 2016 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.authorization.mongo.store;
import com.mongodb.DBObject;
import com.mongodb.QueryBuilder;
import org.keycloak.authorization.AuthorizationProvider;
import org.keycloak.authorization.model.ResourceServer;
import org.keycloak.authorization.mongo.adapter.ResourceServerAdapter;
import org.keycloak.authorization.mongo.entities.ResourceServerEntity;
import org.keycloak.authorization.store.ResourceServerStore;
import org.keycloak.connections.mongo.api.MongoStore;
import org.keycloak.connections.mongo.api.context.MongoStoreInvocationContext;
import org.keycloak.models.utils.KeycloakModelUtils;
/**
* @author <a href="mailto:psilva@redhat.com">Pedro Igor</a>
*/
public class MongoResourceServerStore implements ResourceServerStore {
private final MongoStoreInvocationContext invocationContext;
private final AuthorizationProvider authorizationProvider;
public MongoResourceServerStore(MongoStoreInvocationContext invocationContext, AuthorizationProvider authorizationProvider) {
this.invocationContext = invocationContext;
this.authorizationProvider = authorizationProvider;
}
@Override
public ResourceServer create(String clientId) {
ResourceServerEntity entity = new ResourceServerEntity();
entity.setId(KeycloakModelUtils.generateId());
entity.setClientId(clientId);
getMongoStore().insertEntity(entity, getInvocationContext());
return new ResourceServerAdapter(entity, getInvocationContext());
}
@Override
public void delete(String id) {
getMongoStore().removeEntity(ResourceServerEntity.class, id, getInvocationContext());
}
@Override
public ResourceServer findById(String id) {
ResourceServerEntity entity = getMongoStore().loadEntity(ResourceServerEntity.class, id, getInvocationContext());
if (entity == null) {
return null;
}
return new ResourceServerAdapter(entity, getInvocationContext());
}
@Override
public ResourceServer findByClient(String clientId) {
DBObject query = new QueryBuilder()
.and("clientId").is(clientId)
.get();
return getMongoStore().loadEntities(ResourceServerEntity.class, query, getInvocationContext()).stream()
.map(resourceServer -> findById(resourceServer.getId())).findFirst().orElse(null);
}
private MongoStoreInvocationContext getInvocationContext() {
return this.invocationContext;
}
private MongoStore getMongoStore() {
return getInvocationContext().getMongoStore();
}
}

View file

@@ -1,176 +0,0 @@
/*
* JBoss, Home of Professional Open Source.
* Copyright 2016 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.authorization.mongo.store;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;
import com.mongodb.QueryBuilder;
import org.keycloak.authorization.AuthorizationProvider;
import org.keycloak.authorization.model.Resource;
import org.keycloak.authorization.model.ResourceServer;
import org.keycloak.authorization.mongo.adapter.ResourceAdapter;
import org.keycloak.authorization.mongo.entities.ResourceEntity;
import org.keycloak.authorization.store.ResourceStore;
import org.keycloak.connections.mongo.api.MongoStore;
import org.keycloak.connections.mongo.api.context.MongoStoreInvocationContext;
import org.keycloak.models.utils.KeycloakModelUtils;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
import static java.util.stream.Collectors.toList;
/**
* @author <a href="mailto:psilva@redhat.com">Pedro Igor</a>
*/
public class MongoResourceStore implements ResourceStore {
private final MongoStoreInvocationContext invocationContext;
private final AuthorizationProvider authorizationProvider;
public MongoResourceStore(MongoStoreInvocationContext invocationContext, AuthorizationProvider authorizationProvider) {
this.invocationContext = invocationContext;
this.authorizationProvider = authorizationProvider;
}
@Override
public Resource create(String name, ResourceServer resourceServer, String owner) {
ResourceEntity entity = new ResourceEntity();
entity.setId(KeycloakModelUtils.generateId());
entity.setName(name);
entity.setResourceServerId(resourceServer.getId());
entity.setOwner(owner);
getMongoStore().insertEntity(entity, getInvocationContext());
return new ResourceAdapter(entity, getInvocationContext(), this.authorizationProvider);
}
@Override
public void delete(String id) {
getMongoStore().removeEntity(ResourceEntity.class, id, getInvocationContext());
}
@Override
public Resource findById(String id, String resourceServerId) {
ResourceEntity entity = getMongoStore().loadEntity(ResourceEntity.class, id, getInvocationContext());
if (entity == null) {
return null;
}
return new ResourceAdapter(entity, getInvocationContext(), this.authorizationProvider);
}
@Override
public List<Resource> findByOwner(String ownerId, String resourceServerId) {
DBObject query = new QueryBuilder()
.and("resourceServerId").is(resourceServerId)
.and("owner").is(ownerId)
.get();
return getMongoStore().loadEntities(ResourceEntity.class, query, getInvocationContext()).stream()
.map(scope -> findById(scope.getId(), resourceServerId)).collect(toList());
}
@Override
public List<Resource> findByUri(String uri, String resourceServerId) {
DBObject query = new QueryBuilder()
.and("resourceServerId").is(resourceServerId)
.and("uri").is(uri)
.get();
return getMongoStore().loadEntities(ResourceEntity.class, query, getInvocationContext()).stream()
.map(scope -> findById(scope.getId(), resourceServerId)).collect(toList());
}
@Override
public List findByResourceServer(String resourceServerId) {
DBObject query = new QueryBuilder()
.and("resourceServerId").is(resourceServerId)
.get();
return getMongoStore().loadEntities(ResourceEntity.class, query, getInvocationContext()).stream()
.map(scope -> findById(scope.getId(), resourceServerId)).collect(toList());
}
@Override
public List<Resource> findByResourceServer(Map<String, String[]> attributes, String resourceServerId, int firstResult, int maxResult) {
QueryBuilder queryBuilder = new QueryBuilder()
.and("resourceServerId").is(resourceServerId);
attributes.forEach((name, value) -> {
if ("scope".equals(name)) {
queryBuilder.and("scopes").in(value);
} else {
queryBuilder.and(name).regex(Pattern.compile(".*" + value[0] + ".*", Pattern.CASE_INSENSITIVE));
}
});
DBObject sort = new BasicDBObject("name", 1);
return getMongoStore().loadEntities(ResourceEntity.class, queryBuilder.get(), sort, firstResult, maxResult, invocationContext).stream()
.map(scope -> findById(scope.getId(), resourceServerId)).collect(toList());
}
@Override
public List<Resource> findByScope(List<String> id, String resourceServerId) {
DBObject query = new QueryBuilder()
.and("resourceServerId").is(resourceServerId)
.and("scopes").in(id)
.get();
return getMongoStore().loadEntities(ResourceEntity.class, query, getInvocationContext()).stream()
.map(policyEntity -> findById(policyEntity.getId(), resourceServerId))
.collect(toList());
}
@Override
public Resource findByName(String name, String resourceServerId) {
DBObject query = new QueryBuilder()
.and("resourceServerId").is(resourceServerId)
.and("name").is(name)
.get();
return getMongoStore().loadEntities(ResourceEntity.class, query, getInvocationContext()).stream()
.map(policyEntity -> findById(policyEntity.getId(), resourceServerId)).findFirst().orElse(null);
}
@Override
public List<Resource> findByType(String type, String resourceServerId) {
DBObject query = new QueryBuilder()
.and("resourceServerId").is(resourceServerId)
.and("type").is(type)
.get();
return getMongoStore().loadEntities(ResourceEntity.class, query, getInvocationContext()).stream()
.map(policyEntity -> findById(policyEntity.getId(), resourceServerId))
.collect(toList());
}
private MongoStoreInvocationContext getInvocationContext() {
return this.invocationContext;
}
private MongoStore getMongoStore() {
return getInvocationContext().getMongoStore();
}
}

View file

@@ -1,126 +0,0 @@
/*
* JBoss, Home of Professional Open Source.
* Copyright 2016 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.authorization.mongo.store;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;
import com.mongodb.QueryBuilder;
import org.keycloak.authorization.AuthorizationProvider;
import org.keycloak.authorization.model.ResourceServer;
import org.keycloak.authorization.model.Scope;
import org.keycloak.authorization.mongo.adapter.ScopeAdapter;
import org.keycloak.authorization.mongo.entities.ScopeEntity;
import org.keycloak.authorization.store.ScopeStore;
import org.keycloak.connections.mongo.api.MongoStore;
import org.keycloak.connections.mongo.api.context.MongoStoreInvocationContext;
import org.keycloak.models.utils.KeycloakModelUtils;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
import static java.util.stream.Collectors.toList;
/**
* @author <a href="mailto:psilva@redhat.com">Pedro Igor</a>
*/
public class MongoScopeStore implements ScopeStore {
private final MongoStoreInvocationContext invocationContext;
private final AuthorizationProvider authorizationProvider;
public MongoScopeStore(MongoStoreInvocationContext invocationContext, AuthorizationProvider authorizationProvider) {
this.invocationContext = invocationContext;
this.authorizationProvider = authorizationProvider;
}
@Override
public Scope create(final String name, final ResourceServer resourceServer) {
ScopeEntity entity = new ScopeEntity();
entity.setId(KeycloakModelUtils.generateId());
entity.setName(name);
entity.setResourceServerId(resourceServer.getId());
getMongoStore().insertEntity(entity, getInvocationContext());
return new ScopeAdapter(entity, getInvocationContext(), this.authorizationProvider);
}
@Override
public void delete(String id) {
getMongoStore().removeEntity(ScopeEntity.class, id, getInvocationContext());
}
@Override
public Scope findById(String id, String resourceServerId) {
ScopeEntity entity = getMongoStore().loadEntity(ScopeEntity.class, id, getInvocationContext());
if (entity == null) {
return null;
}
return new ScopeAdapter(entity, getInvocationContext(), this.authorizationProvider);
}
@Override
public Scope findByName(String name, String resourceServerId) {
DBObject query = new QueryBuilder()
.and("resourceServerId").is(resourceServerId)
.and("name").is(name)
.get();
return getMongoStore().loadEntities(ScopeEntity.class, query, getInvocationContext()).stream()
.map(scope -> findById(scope.getId(), scope.getResourceServerId())).findFirst().orElse(null);
}
@Override
public List<Scope> findByResourceServer(String resourceServerId) {
DBObject query = new QueryBuilder()
.and("resourceServerId").is(resourceServerId)
.get();
return getMongoStore().loadEntities(ScopeEntity.class, query, getInvocationContext()).stream()
.map(scope -> findById(scope.getId(), scope.getResourceServerId()))
.collect(toList());
}
@Override
public List<Scope> findByResourceServer(Map<String, String[]> attributes, String resourceServerId, int firstResult, int maxResult) {
QueryBuilder queryBuilder = new QueryBuilder()
.and("resourceServerId").is(resourceServerId);
attributes.forEach((name, value) -> {
queryBuilder.and(name).regex(Pattern.compile(".*" + value[0] + ".*", Pattern.CASE_INSENSITIVE));
});
DBObject sort = new BasicDBObject("name", 1);
return getMongoStore().loadEntities(ScopeEntity.class, queryBuilder.get(), sort, firstResult, maxResult, invocationContext).stream()
.map(scope -> findById(scope.getId(), scope.getResourceServerId())).collect(toList());
}
private MongoStoreInvocationContext getInvocationContext() {
return this.invocationContext;
}
private MongoStore getMongoStore() {
return getInvocationContext().getMongoStore();
}
}
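
For orientation, a minimal sketch (not part of the removed sources) of how the ScopeStore contract above was typically driven through a StoreFactory; the helper class and the get-or-create flow are assumptions, and the ResourceServer is expected to come from the authorization session.

import org.keycloak.authorization.model.ResourceServer;
import org.keycloak.authorization.model.Scope;
import org.keycloak.authorization.store.ScopeStore;
import org.keycloak.authorization.store.StoreFactory;

// Hypothetical helper, shown only to illustrate the ScopeStore API removed above.
public class ScopeStoreUsageSketch {
    public Scope getOrCreateScope(StoreFactory storeFactory, ResourceServer resourceServer, String name) {
        ScopeStore scopes = storeFactory.getScopeStore();
        // findByName returns null when no scope with that name exists for the resource server
        Scope existing = scopes.findByName(name, resourceServer.getId());
        return existing != null ? existing : scopes.create(name, resourceServer);
    }
}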

@@ -1,71 +0,0 @@
/*
* JBoss, Home of Professional Open Source.
* Copyright 2016 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.authorization.mongo.store;
import org.keycloak.authorization.AuthorizationProvider;
import org.keycloak.authorization.store.PolicyStore;
import org.keycloak.authorization.store.ResourceServerStore;
import org.keycloak.authorization.store.ResourceStore;
import org.keycloak.authorization.store.ScopeStore;
import org.keycloak.authorization.store.StoreFactory;
import org.keycloak.connections.mongo.api.context.MongoStoreInvocationContext;
import org.keycloak.models.KeycloakSession;
/**
* @author <a href="mailto:psilva@redhat.com">Pedro Igor</a>
*/
public class MongoStoreFactory implements StoreFactory {
private final MongoStoreInvocationContext invocationContext;
private final KeycloakSession session;
public MongoStoreFactory(MongoStoreInvocationContext invocationContext, KeycloakSession session) {
this.invocationContext = invocationContext;
this.session = session;
}
@Override
public PolicyStore getPolicyStore() {
return new MongoPolicyStore(this.invocationContext, getAuthorizationProvider());
}
@Override
public ResourceServerStore getResourceServerStore() {
return new MongoResourceServerStore(this.invocationContext, getAuthorizationProvider());
}
@Override
public ResourceStore getResourceStore() {
return new MongoResourceStore(this.invocationContext, getAuthorizationProvider());
}
@Override
public ScopeStore getScopeStore() {
return new MongoScopeStore(this.invocationContext, getAuthorizationProvider());
}
private AuthorizationProvider getAuthorizationProvider() {
return session.getProvider(AuthorizationProvider.class);
}
@Override
public void close() {
}
}

@@ -1,354 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo;
import com.mongodb.DB;
import com.mongodb.MongoClient;
import com.mongodb.MongoClientOptions;
import com.mongodb.MongoClientURI;
import com.mongodb.MongoCredential;
import com.mongodb.ServerAddress;
import org.jboss.logging.Logger;
import org.keycloak.Config;
import org.keycloak.connections.mongo.api.MongoStore;
import org.keycloak.connections.mongo.impl.MongoStoreImpl;
import org.keycloak.connections.mongo.impl.context.TransactionMongoStoreInvocationContext;
import org.keycloak.connections.mongo.updater.MongoUpdaterProvider;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.KeycloakSessionFactory;
import org.keycloak.models.KeycloakSessionTask;
import org.keycloak.models.dblock.DBLockManager;
import org.keycloak.models.dblock.DBLockProvider;
import org.keycloak.models.utils.KeycloakModelUtils;
import org.keycloak.provider.ServerInfoAwareProviderFactory;
import javax.net.ssl.SSLSocketFactory;
import java.lang.reflect.Method;
import java.net.UnknownHostException;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a>
*/
public class DefaultMongoConnectionFactoryProvider implements MongoConnectionProviderFactory, ServerInfoAwareProviderFactory {
enum MigrationStrategy {
UPDATE, VALIDATE
}
// TODO Make it dynamic
private String[] entities = new String[]{
"org.keycloak.models.mongo.keycloak.entities.MongoRealmEntity",
"org.keycloak.models.mongo.keycloak.entities.MongoUserEntity",
"org.keycloak.models.mongo.keycloak.entities.MongoRoleEntity",
"org.keycloak.models.mongo.keycloak.entities.MongoGroupEntity",
"org.keycloak.models.mongo.keycloak.entities.MongoClientEntity",
"org.keycloak.models.mongo.keycloak.entities.MongoClientTemplateEntity",
"org.keycloak.models.mongo.keycloak.entities.MongoUserConsentEntity",
"org.keycloak.models.mongo.keycloak.entities.MongoMigrationModelEntity",
"org.keycloak.models.mongo.keycloak.entities.MongoOnlineUserSessionEntity",
"org.keycloak.models.mongo.keycloak.entities.MongoOfflineUserSessionEntity",
"org.keycloak.models.mongo.keycloak.entities.IdentityProviderEntity",
"org.keycloak.models.mongo.keycloak.entities.ClientIdentityProviderMappingEntity",
"org.keycloak.models.mongo.keycloak.entities.RequiredCredentialEntity",
"org.keycloak.models.mongo.keycloak.entities.CredentialEntity",
"org.keycloak.models.mongo.keycloak.entities.FederatedIdentityEntity",
"org.keycloak.models.mongo.keycloak.entities.UserFederationProviderEntity",
"org.keycloak.models.mongo.keycloak.entities.UserFederationMapperEntity",
"org.keycloak.models.mongo.keycloak.entities.ProtocolMapperEntity",
"org.keycloak.models.mongo.keycloak.entities.IdentityProviderMapperEntity",
"org.keycloak.models.mongo.keycloak.entities.AuthenticationExecutionEntity",
"org.keycloak.models.mongo.keycloak.entities.AuthenticationFlowEntity",
"org.keycloak.models.mongo.keycloak.entities.AuthenticatorConfigEntity",
"org.keycloak.models.mongo.keycloak.entities.RequiredActionProviderEntity",
"org.keycloak.models.mongo.keycloak.entities.PersistentUserSessionEntity",
"org.keycloak.models.mongo.keycloak.entities.PersistentClientSessionEntity",
"org.keycloak.models.mongo.keycloak.entities.ComponentEntity",
"org.keycloak.storage.mongo.entity.FederatedUser",
"org.keycloak.authorization.mongo.entities.PolicyEntity",
"org.keycloak.authorization.mongo.entities.ResourceEntity",
"org.keycloak.authorization.mongo.entities.ResourceServerEntity",
"org.keycloak.authorization.mongo.entities.ScopeEntity"
};
private static final Logger logger = Logger.getLogger(DefaultMongoConnectionFactoryProvider.class);
private static final int STATE_BEFORE_INIT = 0; // Even before MongoClient is created
private static final int STATE_BEFORE_UPDATE = 1; // Mongo client was created, but DB is not yet updated to last version
private static final int STATE_AFTER_UPDATE = 2; // Mongo client was created and DB updated. DB is fully initialized now
private volatile int state = STATE_BEFORE_INIT;
private MongoClient client;
private MongoStore mongoStore;
private DB db;
protected Config.Scope config;
private Map<String,String> operationalInfo;
@Override
public void init(Config.Scope config) {
this.config = config;
}
@Override
public void postInit(KeycloakSessionFactory factory) {
}
@Override
public DB getDBBeforeUpdate() {
lazyInitBeforeUpdate();
return db;
}
private void lazyInitBeforeUpdate() {
if (state == STATE_BEFORE_INIT) {
synchronized (this) {
if (state == STATE_BEFORE_INIT) {
try {
this.client = createMongoClient();
String dbName = config.get("db", "keycloak");
this.db = client.getDB(dbName);
state = STATE_BEFORE_UPDATE;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
}
}
@Override
public MongoConnectionProvider create(KeycloakSession session) {
lazyInit(session);
TransactionMongoStoreInvocationContext invocationContext = new TransactionMongoStoreInvocationContext(mongoStore);
session.getTransactionManager().enlist(new MongoKeycloakTransaction(invocationContext));
return new DefaultMongoConnectionProvider(db, mongoStore, invocationContext);
}
private void lazyInit(KeycloakSession session) {
lazyInitBeforeUpdate();
if (state == STATE_BEFORE_UPDATE) {
synchronized (this) {
if (state == STATE_BEFORE_UPDATE) {
try {
update(session);
this.mongoStore = new MongoStoreImpl(db, getManagedEntities());
state = STATE_AFTER_UPDATE;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
}
}
private void update(KeycloakSession session) {
MigrationStrategy strategy = getMigrationStrategy();
MongoUpdaterProvider mongoUpdater = session.getProvider(MongoUpdaterProvider.class);
if (mongoUpdater == null) {
throw new RuntimeException("Can't update database: Mongo updater provider not found");
}
DBLockProvider dbLock = new DBLockManager(session).getDBLock();
if (dbLock.hasLock()) {
updateOrValidateDB(strategy, session, mongoUpdater);
} else {
logger.trace("Don't have DBLock retrieved before upgrade. Needs to acquire lock first in separate transaction");
KeycloakModelUtils.runJobInTransaction(session.getKeycloakSessionFactory(), new KeycloakSessionTask() {
@Override
public void run(KeycloakSession lockSession) {
DBLockManager dbLockManager = new DBLockManager(lockSession);
DBLockProvider dbLock2 = dbLockManager.getDBLock();
dbLock2.waitForLock();
try {
updateOrValidateDB(strategy, session, mongoUpdater);
} finally {
dbLock2.releaseLock();
}
}
});
}
}
private Class[] getManagedEntities() throws ClassNotFoundException {
Class[] entityClasses = new Class[entities.length];
for (int i = 0; i < entities.length; i++) {
entityClasses[i] = getClass().getClassLoader().loadClass(entities[i]);
}
return entityClasses;
}
protected void updateOrValidateDB(MigrationStrategy strategy, KeycloakSession session, MongoUpdaterProvider mongoUpdater) {
switch (strategy) {
case UPDATE:
mongoUpdater.update(session, db);
break;
case VALIDATE:
mongoUpdater.validate(session, db);
break;
}
}
@Override
public void close() {
if (client != null) {
client.close();
}
}
@Override
public String getId() {
return "default";
}
/**
* Override this method if you need more control over how the Mongo client is configured. It can also be used to inject a Mongo client
* from a different source.
*
* This method can assume that "config" is already set and can use it.
*
* @return the MongoClient instance, which will be shared for the whole Keycloak server
*
* @throws UnknownHostException
*/
protected MongoClient createMongoClient() throws UnknownHostException {
operationalInfo = new LinkedHashMap<>();
String dbName = config.get("db", "keycloak");
String uriString = config.get("uri");
if (uriString != null) {
MongoClientURI uri = new MongoClientURI(uriString);
MongoClient client = new MongoClient(uri);
StringBuilder hostsBuilder = new StringBuilder();
for (int i=0 ; i<uri.getHosts().size() ; i++) {
if (i!=0) {
hostsBuilder.append(", ");
}
hostsBuilder.append(uri.getHosts().get(i));
}
String hosts = hostsBuilder.toString();
operationalInfo.put("mongoHosts", hosts);
operationalInfo.put("mongoDatabaseName", dbName);
operationalInfo.put("mongoUser", uri.getUsername());
logger.debugv("Initialized mongo model. host(s): %s, db: %s", uri.getHosts(), dbName);
return client;
} else {
String host = config.get("host", ServerAddress.defaultHost());
int port = config.getInt("port", ServerAddress.defaultPort());
String user = config.get("user");
String password = config.get("password");
MongoClientOptions clientOptions = getClientOptions();
MongoClient client;
if (user != null && password != null) {
MongoCredential credential = MongoCredential.createCredential(user, dbName, password.toCharArray());
client = new MongoClient(new ServerAddress(host, port), Collections.singletonList(credential), clientOptions);
} else {
client = new MongoClient(new ServerAddress(host, port), clientOptions);
}
operationalInfo.put("mongoServerAddress", client.getAddress().toString());
operationalInfo.put("mongoDatabaseName", dbName);
operationalInfo.put("mongoUser", user);
logger.debugv("Initialized mongo model. host: %s, port: %d, db: %s", host, port, dbName);
return client;
}
}
protected MongoClientOptions getClientOptions() {
MongoClientOptions.Builder builder = MongoClientOptions.builder();
checkIntOption("connectionsPerHost", builder);
checkIntOption("threadsAllowedToBlockForConnectionMultiplier", builder);
checkIntOption("maxWaitTime", builder);
checkIntOption("connectTimeout", builder);
checkIntOption("socketTimeout", builder);
checkBooleanOption("socketKeepAlive", builder);
checkBooleanOption("autoConnectRetry", builder);
if(config.getBoolean("ssl", false)) {
builder.socketFactory(SSLSocketFactory.getDefault());
}
return builder.build();
}
protected void checkBooleanOption(String optionName, MongoClientOptions.Builder builder) {
Boolean val = config.getBoolean(optionName);
if (val != null) {
try {
Method m = MongoClientOptions.Builder.class.getMethod(optionName, boolean.class);
m.invoke(builder, val);
} catch (Exception e) {
throw new IllegalStateException("Problem configuring boolean option " + optionName + " for mongo client. Ensure you used correct value true or false and if this option is supported by mongo driver", e);
}
}
}
protected void checkIntOption(String optionName, MongoClientOptions.Builder builder) {
Integer val = config.getInt(optionName);
if (val != null) {
try {
Method m = MongoClientOptions.Builder.class.getMethod(optionName, int.class);
m.invoke(builder, val);
} catch (Exception e) {
throw new IllegalStateException("Problem configuring int option " + optionName + " for mongo client. Ensure you used correct value (number) and if this option is supported by mongo driver", e);
}
}
}
@Override
public Map<String,String> getOperationalInfo() {
return operationalInfo;
}
private MigrationStrategy getMigrationStrategy() {
String migrationStrategy = config.get("migrationStrategy");
if (migrationStrategy == null) {
// Support 'databaseSchema' for backwards compatibility
migrationStrategy = config.get("databaseSchema");
}
if (migrationStrategy != null) {
return MigrationStrategy.valueOf(migrationStrategy.toUpperCase());
} else {
return MigrationStrategy.UPDATE;
}
}
}
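
The createMongoClient() Javadoc above documents the method as an override hook. A hedged sketch of what such an override might look like, assuming the subclass is registered as its own provider; the provider id "custom" and the fixed URI fallback are illustrative only.

import com.mongodb.MongoClient;
import com.mongodb.MongoClientURI;

// Hypothetical subclass using the documented override hook; not part of the removed sources.
public class CustomMongoConnectionFactoryProvider extends DefaultMongoConnectionFactoryProvider {
    @Override
    protected MongoClient createMongoClient() {
        // "config" is guaranteed to be set before this is called (see the Javadoc above)
        String uri = config.get("uri", "mongodb://localhost:27017");
        return new MongoClient(new MongoClientURI(uri));
    }

    @Override
    public String getId() {
        return "custom"; // illustrative provider id
    }
}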

@@ -1,58 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo;
import com.mongodb.DB;
import org.keycloak.connections.mongo.api.MongoStore;
import org.keycloak.connections.mongo.api.context.MongoStoreInvocationContext;
/**
* @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a>
*/
public class DefaultMongoConnectionProvider implements MongoConnectionProvider {
private DB db;
private MongoStore mongoStore;
private MongoStoreInvocationContext invocationContext;
public DefaultMongoConnectionProvider(DB db, MongoStore mongoStore, MongoStoreInvocationContext invocationContext) {
this.db = db;
this.mongoStore = mongoStore;
this.invocationContext = invocationContext;
}
@Override
public DB getDB() {
return db;
}
@Override
public MongoStore getMongoStore() {
return mongoStore;
}
@Override
public MongoStoreInvocationContext getInvocationContext() {
return invocationContext;
}
@Override
public void close() {
}
}

@@ -1,39 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo;
import com.mongodb.DB;
import org.keycloak.connections.mongo.api.MongoStore;
import org.keycloak.connections.mongo.api.context.MongoStoreInvocationContext;
import org.keycloak.provider.Provider;
/**
* @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a>
*/
public interface MongoConnectionProvider extends Provider {
/**
* @return Fully updated and initialized DB
*/
DB getDB();
MongoStore getMongoStore();
MongoStoreInvocationContext getInvocationContext();
}
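
A hedged sketch of the usual way such a provider was looked up inside a request, combining only the interfaces shown in this commit; the helper class is hypothetical.

import org.keycloak.connections.mongo.MongoConnectionProvider;
import org.keycloak.connections.mongo.api.MongoStore;
import org.keycloak.models.KeycloakSession;

// Hypothetical caller; session.getProvider(...) resolves the factory's create(session) shown above,
// which also enlists the Mongo transaction for this request.
public class MongoConnectionUsageSketch {
    public MongoStore storeFor(KeycloakSession session) {
        MongoConnectionProvider connection = session.getProvider(MongoConnectionProvider.class);
        return connection.getMongoStore();
    }
}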

@@ -1,33 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo;
import com.mongodb.DB;
import org.keycloak.provider.ProviderFactory;
/**
* @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a>
*/
public interface MongoConnectionProviderFactory extends ProviderFactory<MongoConnectionProvider> {
/**
* @return DB object, which may not yet be updated to the current Keycloak version. Useful only if something needs to be done even before the DB update (for example acquiring the DB lock)
*/
DB getDBBeforeUpdate();
}

@@ -1,49 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo;
import org.keycloak.provider.Provider;
import org.keycloak.provider.ProviderFactory;
import org.keycloak.provider.Spi;
/**
* @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a>
*/
public class MongoConnectionSpi implements Spi {
@Override
public boolean isInternal() {
return true;
}
@Override
public String getName() {
return "connectionsMongo";
}
@Override
public Class<? extends Provider> getProviderClass() {
return MongoConnectionProvider.class;
}
@Override
public Class<? extends ProviderFactory> getProviderFactoryClass() {
return MongoConnectionProviderFactory.class;
}
}

@@ -1,85 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo;
import com.mongodb.MongoException;
import org.keycloak.connections.mongo.api.context.MongoStoreInvocationContext;
import org.keycloak.connections.mongo.impl.MongoStoreImpl;
import org.keycloak.models.KeycloakTransaction;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class MongoKeycloakTransaction implements KeycloakTransaction {
private final MongoStoreInvocationContext invocationContext;
private boolean started = false;
private boolean rollbackOnly = false;
public MongoKeycloakTransaction(MongoStoreInvocationContext invocationContext) {
this.invocationContext = invocationContext;
}
@Override
public void begin() {
if (started) {
throw new IllegalStateException("Transaction already started");
}
started = true;
invocationContext.begin();
}
@Override
public void commit() {
if (!started) {
throw new IllegalStateException("Transaction not yet started");
}
if (rollbackOnly) {
throw new IllegalStateException("Can't commit as transaction marked for rollback");
}
try {
invocationContext.commit();
} catch (MongoException e) {
throw MongoStoreImpl.convertException(e);
}
started = false;
}
@Override
public void rollback() {
invocationContext.rollback();
started = false;
}
@Override
public void setRollbackOnly() {
this.rollbackOnly = true;
}
@Override
public boolean getRollbackOnly() {
return rollbackOnly;
}
@Override
public boolean isActive() {
return started;
}
}
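
A hedged sketch of the begin/commit/rollback lifecycle this KeycloakTransaction implements when used on its own; in the server it is normally enlisted via session.getTransactionManager().enlist(...) as shown earlier, so the wrapper below is purely illustrative.

import org.keycloak.connections.mongo.MongoKeycloakTransaction;
import org.keycloak.connections.mongo.api.context.MongoStoreInvocationContext;

// Hypothetical wrapper demonstrating the transaction lifecycle; not part of the removed sources.
public class MongoTransactionLifecycleSketch {
    public void runInTransaction(MongoStoreInvocationContext invocationContext, Runnable work) {
        MongoKeycloakTransaction tx = new MongoKeycloakTransaction(invocationContext);
        tx.begin();
        try {
            work.run();
            tx.commit(); // delegates to invocationContext.commit(), where postponed update tasks are applied
        } catch (RuntimeException e) {
            tx.rollback();
            throw e;
        }
    }
}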

@@ -1,38 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.api;
import java.lang.annotation.Documented;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import static java.lang.annotation.ElementType.TYPE;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
@Target({TYPE})
@Documented
@Retention(RUNTIME)
@Inherited
public @interface MongoCollection {
String collectionName();
}

@@ -1,26 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.api;
/**
* Base interface for object, which is persisted in Mongo
*
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public interface MongoEntity {
}

@@ -1,37 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.api;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
@Target({METHOD, FIELD})
@Documented
@Retention(RUNTIME)
public @interface MongoField {
// TODO: fieldName add lazy loading?
}

@@ -1,38 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.api;
import org.keycloak.connections.mongo.api.context.MongoStoreInvocationContext;
/**
* Entity with Id
*
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public interface MongoIdentifiableEntity extends MongoEntity {
public String getId();
public void setId(String id);
/**
* Lifecycle callback, which is called after removal of this object from Mongo.
* It may be useful for triggering removal of wired objects.
*/
void afterRemove(MongoStoreInvocationContext invocationContext);
}
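
Taken together with the @MongoCollection annotation above, the contract looks roughly like the hedged sketch below; the entity class and the collection name "demoEntities" are invented for illustration and do not correspond to any of the removed model classes.

import org.keycloak.connections.mongo.api.MongoCollection;
import org.keycloak.connections.mongo.api.MongoIdentifiableEntity;
import org.keycloak.connections.mongo.api.context.MongoStoreInvocationContext;

// Hypothetical minimal entity: a mapped collection name, an id, and one persisted property.
@MongoCollection(collectionName = "demoEntities")
public class DemoEntity implements MongoIdentifiableEntity {

    private String id;
    private String name;

    @Override
    public String getId() {
        return id;
    }

    @Override
    public void setId(String id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    @Override
    public void afterRemove(MongoStoreInvocationContext invocationContext) {
        // lifecycle hook from MongoIdentifiableEntity: nothing wired to this entity, so nothing to clean up
    }
}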

@@ -1,96 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.api;
import com.mongodb.DBObject;
import org.keycloak.connections.mongo.api.context.MongoStoreInvocationContext;
import java.util.List;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public interface MongoStore {
/**
* Insert new entity
*
* @param entity to insert
*/
void insertEntity(MongoIdentifiableEntity entity, MongoStoreInvocationContext context);
/**
* Update existing entity
*
* @param entity to update
*/
void updateEntity(MongoIdentifiableEntity entity, MongoStoreInvocationContext context);
/**
* Bulk update of more entities of some type
*
* @param type
* @param query
* @param update
* @param context
* @return count of updated entities
*/
<T extends MongoIdentifiableEntity> int updateEntities(Class<T> type, DBObject query, DBObject update, MongoStoreInvocationContext context);
<T extends MongoIdentifiableEntity> T loadEntity(Class<T> type, String id, MongoStoreInvocationContext context);
<T extends MongoIdentifiableEntity> T loadSingleEntity(Class<T> type, DBObject query, MongoStoreInvocationContext context);
/**
* @param type
* @param query
* @param context
* @return query result or empty list if no results available for the query. Doesn't return null
*/
<T extends MongoIdentifiableEntity> List<T> loadEntities(Class<T> type, DBObject query, MongoStoreInvocationContext context);
/**
* @param type
* @param query
* @param context
* @return query result or empty list if no results available for the query. Doesn't return null
*/
<T extends MongoIdentifiableEntity> List<T> loadEntities(Class<T> type, DBObject query, DBObject sort, int firstResult, int maxResults, MongoStoreInvocationContext context);
<T extends MongoIdentifiableEntity> int countEntities(Class<T> type, DBObject query, MongoStoreInvocationContext context);
boolean removeEntity(MongoIdentifiableEntity entity, MongoStoreInvocationContext context);
boolean removeEntity(Class<? extends MongoIdentifiableEntity> type, String id, MongoStoreInvocationContext context);
/**
*
* @param type
* @param query
* @param callback if true, the store first loads all entities and then calls "afterRemove" for every entity. If false, the entities are removed directly without being loaded and without the "afterRemove" callback;
* false has better performance (especially when removing a big number of entities)
* @param context
* @return count of removed entities
*/
int removeEntities(Class<? extends MongoIdentifiableEntity> type, DBObject query, boolean callback, MongoStoreInvocationContext context);
<S> boolean pushItemToList(MongoIdentifiableEntity entity, String listPropertyName, S itemToPush, boolean skipIfAlreadyPresent, MongoStoreInvocationContext context);
<S> boolean pullItemFromList(MongoIdentifiableEntity entity, String listPropertyName, S itemToPull, MongoStoreInvocationContext context);
}
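
A hedged sketch of how these operations combine; DemoEntity is the illustrative entity from the sketch above, and the invocation context is assumed to come from MongoConnectionProvider.getInvocationContext().

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;
import java.util.List;
import org.keycloak.connections.mongo.api.MongoStore;
import org.keycloak.connections.mongo.api.context.MongoStoreInvocationContext;

// Hypothetical usage of the MongoStore contract; not part of the removed sources.
public class MongoStoreUsageSketch {
    public List<DemoEntity> createAndFindByName(MongoStore store, MongoStoreInvocationContext context, String name) {
        DemoEntity entity = new DemoEntity();
        entity.setName(name);
        store.insertEntity(entity, context); // an id is generated by the store if none is set
        DBObject query = new BasicDBObject("name", name);
        return store.loadEntities(DemoEntity.class, query, context); // returns an empty list, never null, on no match
    }
}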

@@ -1,51 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.api.context;
import org.keycloak.connections.mongo.api.MongoIdentifiableEntity;
import org.keycloak.connections.mongo.api.MongoStore;
/**
* Context, which provides callback methods to be invoked by MongoStore
*
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public interface MongoStoreInvocationContext {
void addCreatedEntity(MongoIdentifiableEntity entity);
void addLoadedEntity(MongoIdentifiableEntity entity);
<T extends MongoIdentifiableEntity> T getLoadedEntity(Class<T> type, String id);
void addUpdateTask(MongoIdentifiableEntity entityToUpdate, MongoTask task);
void addRemovedEntity(MongoIdentifiableEntity entity);
void beforeDBSearch(Class<? extends MongoIdentifiableEntity> entityType);
void beforeDBBulkUpdateOrRemove(Class<? extends MongoIdentifiableEntity> entityType);
void begin();
void commit();
void rollback();
MongoStore getMongoStore();
}

@@ -1,28 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.api.context;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public interface MongoTask {
void execute();
boolean isFullUpdate();
}

@@ -1,39 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.api.types;
/**
* SPI object to convert an object from an application type to a database type and vice versa. It shouldn't be used directly by the application.
* Various mappers should be registered in MapperRegistry, which is the main entry point to be used by the application.
*
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public interface Mapper<T, S> {
/**
* Convert object from one type to expected type
*
* @param mapperContext Encapsulates reference to converted object and other things, which might be helpful in conversion
* @return converted object
*/
S convertObject(MapperContext<T, S> mapperContext);
Class<? extends T> getTypeOfObjectToConvert();
Class<S> getExpectedReturnType();
}

@@ -1,54 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.api.types;
import java.lang.reflect.Type;
import java.util.List;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class MapperContext<T, S> {
// object to convert
private final T objectToConvert;
// expected return type, which could be useful information in some mappers, so they are able to dynamically instantiate types
private final Class<? extends S> expectedReturnType;
// in case that expected return type is generic type (like "List<String>"), then genericTypes could contain list of expected generic arguments
private final List<Type> genericTypes;
public MapperContext(T objectToConvert, Class<? extends S> expectedReturnType, List<Type> genericTypes) {
this.objectToConvert = objectToConvert;
this.expectedReturnType = expectedReturnType;
this.genericTypes = genericTypes;
}
public T getObjectToConvert() {
return objectToConvert;
}
public Class<? extends S> getExpectedReturnType() {
return expectedReturnType;
}
public List<Type> getGenericTypes() {
return genericTypes;
}
}

@@ -1,136 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.api.types;
import java.util.HashMap;
import java.util.Map;
/**
* Registry of mappers, which allow converting application objects to database objects. MapperRegistry is the main entry point to be used by the application.
* The application can create an instance of MapperRegistry and then register the required Mapper objects.
*
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class MapperRegistry {
// TODO: Thread-safety support (maybe...)
// Mappers of Application objects to DB objects
private Map<Class<?>, Mapper<?, ?>> appObjectMappers = new HashMap<Class<?>, Mapper<?, ?>>();
// Mappers of DB objects to Application objects
private Map<Class<?>, Map<Class<?>, Mapper<?, ?>>> dbObjectMappers = new HashMap<Class<?>, Map<Class<?>, Mapper<?,?>>>();
/**
* Add mapper for converting application objects to DB objects
*
* @param mapper
*/
public void addAppObjectMapper(Mapper<?, ?> mapper) {
appObjectMappers.put(mapper.getTypeOfObjectToConvert(), mapper);
}
/**
* Add mapper for converting DB objects to application objects
*
* @param mapper
*/
public void addDBObjectMapper(Mapper<?, ?> mapper) {
Class<?> dbObjectType = mapper.getTypeOfObjectToConvert();
Class<?> appObjectType = mapper.getExpectedReturnType();
Map<Class<?>, Mapper<?, ?>> appObjects = dbObjectMappers.get(dbObjectType);
if (appObjects == null) {
appObjects = new HashMap<Class<?>, Mapper<?, ?>>();
dbObjectMappers.put(dbObjectType, appObjects);
}
appObjects.put(appObjectType, mapper);
}
public <S> S convertDBObjectToApplicationObject(MapperContext<Object, S> context) {
if (context.getObjectToConvert() == null) {
return null;
}
Object dbObject = context.getObjectToConvert();
Class<?> expectedApplicationObjectType = context.getExpectedReturnType();
Class<?> dbObjectType = dbObject.getClass();
Mapper<Object, S> mapper;
Map<Class<?>, Mapper<?, ?>> appObjects = dbObjectMappers.get(dbObjectType);
if (appObjects == null) {
throw new IllegalArgumentException("Not found any mappers for type " + dbObjectType);
} else {
if (appObjects.size() == 1) {
mapper = (Mapper<Object, S>)appObjects.values().iterator().next();
} else {
// Try to find converter for requested application type
mapper = (Mapper<Object, S>)getAppConverterForType(context.getExpectedReturnType(), appObjects);
}
}
if (mapper == null) {
throw new IllegalArgumentException("Can't found mapper for type " + dbObjectType + " and expectedApplicationType " + expectedApplicationObjectType);
}
return mapper.convertObject(context);
}
public <S> S convertApplicationObjectToDBObject(Object applicationObject, Class<S> expectedDBObjectType) {
if (applicationObject == null) {
return null;
}
Class<?> appObjectType = applicationObject.getClass();
Mapper<Object, S> mapper = (Mapper<Object, S>)getAppConverterForType(appObjectType, appObjectMappers);
if (mapper == null) {
throw new IllegalArgumentException("Can't found converter for type " + appObjectType + " in registered appObjectMappers");
}
if (!expectedDBObjectType.isAssignableFrom(mapper.getExpectedReturnType())) {
throw new IllegalArgumentException("Converter " + mapper + " has return type " + mapper.getExpectedReturnType() +
" but we need type " + expectedDBObjectType);
}
return mapper.convertObject(new MapperContext<Object, S>(applicationObject, expectedDBObjectType, null));
}
// Try to find converter for given type or all it's supertypes
private static Mapper<Object, ?> getAppConverterForType(Class<?> appObjectType, Map<Class<?>, Mapper<?, ?>> appObjectConverters) {
Mapper<Object, ?> mapper = (Mapper<Object, ?>)appObjectConverters.get(appObjectType);
if (mapper != null) {
return mapper;
} else {
Class<?>[] interfaces = appObjectType.getInterfaces();
for (Class<?> interface1 : interfaces) {
mapper = getAppConverterForType(interface1, appObjectConverters);
if (mapper != null) {
return mapper;
}
}
Class<?> superType = appObjectType.getSuperclass();
if (superType != null) {
return getAppConverterForType(superType, appObjectConverters);
} else {
return null;
}
}
}
}
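
A hedged sketch of plugging a custom Mapper into this registry, in the same style as the SimpleMapper and enum mapper registrations performed by MongoStoreImpl further below; the URI-to-String mapping is invented for illustration.

import java.net.URI;
import org.keycloak.connections.mongo.api.types.Mapper;
import org.keycloak.connections.mongo.api.types.MapperContext;
import org.keycloak.connections.mongo.api.types.MapperRegistry;

// Hypothetical application-to-DB mapper; convertApplicationObjectToDBObject(uri, String.class)
// would resolve it through getAppConverterForType once it is registered.
public class UriToStringMapper implements Mapper<URI, String> {

    @Override
    public String convertObject(MapperContext<URI, String> mapperContext) {
        return mapperContext.getObjectToConvert().toString();
    }

    @Override
    public Class<? extends URI> getTypeOfObjectToConvert() {
        return URI.class;
    }

    @Override
    public Class<String> getExpectedReturnType() {
        return String.class;
    }

    public static void register(MapperRegistry registry) {
        registry.addAppObjectMapper(new UriToStringMapper());
        // reading the value back would need a matching String-to-URI mapper added via addDBObjectMapper
    }
}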

@@ -1,57 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.impl;
import org.keycloak.models.utils.reflection.Property;
import java.util.Collection;
import java.util.Map;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class EntityInfo {
private final Class<?> entityClass;
private final String dbCollectionName;
private final Map<String, Property<Object>> properties;
public EntityInfo(Class<?> entityClass, String dbCollectionName, Map<String, Property<Object>> properties) {
this.entityClass = entityClass;
this.dbCollectionName = dbCollectionName;
this.properties = properties;
}
public Class<?> getEntityClass() {
return entityClass;
}
public String getDbCollectionName() {
return dbCollectionName;
}
public Collection<Property<Object>> getProperties() {
return properties.values();
}
public Property<Object> getPropertyByName(String propertyName) {
return properties.get(propertyName);
}
}

@@ -1,491 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.impl;
import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import com.mongodb.DBObject;
import com.mongodb.DuplicateKeyException;
import com.mongodb.MongoException;
import com.mongodb.WriteResult;
import org.jboss.logging.Logger;
import org.keycloak.connections.mongo.api.MongoCollection;
import org.keycloak.connections.mongo.api.MongoEntity;
import org.keycloak.connections.mongo.api.MongoIdentifiableEntity;
import org.keycloak.connections.mongo.api.MongoStore;
import org.keycloak.connections.mongo.api.context.MongoStoreInvocationContext;
import org.keycloak.connections.mongo.api.context.MongoTask;
import org.keycloak.connections.mongo.api.types.Mapper;
import org.keycloak.connections.mongo.api.types.MapperContext;
import org.keycloak.connections.mongo.api.types.MapperRegistry;
import org.keycloak.connections.mongo.impl.types.BasicDBListMapper;
import org.keycloak.connections.mongo.impl.types.BasicDBListToSetMapper;
import org.keycloak.connections.mongo.impl.types.BasicDBObjectMapper;
import org.keycloak.connections.mongo.impl.types.BasicDBObjectToMapMapper;
import org.keycloak.connections.mongo.impl.types.EnumToStringMapper;
import org.keycloak.connections.mongo.impl.types.ListMapper;
import org.keycloak.connections.mongo.impl.types.MapMapper;
import org.keycloak.connections.mongo.impl.types.MongoEntityMapper;
import org.keycloak.connections.mongo.impl.types.SimpleMapper;
import org.keycloak.connections.mongo.impl.types.StringToEnumMapper;
import org.keycloak.models.ModelDuplicateException;
import org.keycloak.models.ModelException;
import org.keycloak.models.utils.KeycloakModelUtils;
import org.keycloak.models.utils.reflection.Property;
import org.keycloak.models.utils.reflection.PropertyQueries;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class MongoStoreImpl implements MongoStore {
private static final Class<?>[] SIMPLE_TYPES = { String.class, Integer.class, Boolean.class, Long.class, Double.class, Character.class, Date.class, byte[].class };
private final DB database;
private static final Logger logger = Logger.getLogger(MongoStoreImpl.class);
private final MapperRegistry mapperRegistry;
private ConcurrentMap<Class<?>, EntityInfo> entityInfoCache =
new ConcurrentHashMap<Class<?>, EntityInfo>();
public MongoStoreImpl(DB database, Class<?>[] managedEntityTypes) {
this.database = database;
mapperRegistry = new MapperRegistry();
for (Class<?> simpleMapperClass : SIMPLE_TYPES) {
SimpleMapper mapper = new SimpleMapper(simpleMapperClass);
mapperRegistry.addAppObjectMapper(mapper);
mapperRegistry.addDBObjectMapper(mapper);
}
// Specific converter for ArrayList is added just for performance purposes to avoid recursive converter lookup (most list implementations will be ArrayList)
mapperRegistry.addAppObjectMapper(new ListMapper(mapperRegistry, ArrayList.class));
mapperRegistry.addAppObjectMapper(new ListMapper(mapperRegistry, List.class));
mapperRegistry.addDBObjectMapper(new BasicDBListMapper(mapperRegistry));
mapperRegistry.addAppObjectMapper(new ListMapper(mapperRegistry, HashSet.class));
mapperRegistry.addAppObjectMapper(new ListMapper(mapperRegistry, Set.class));
mapperRegistry.addDBObjectMapper(new BasicDBListToSetMapper(mapperRegistry));
mapperRegistry.addAppObjectMapper(new MapMapper(mapperRegistry, HashMap.class));
mapperRegistry.addAppObjectMapper(new MapMapper(mapperRegistry, Map.class));
mapperRegistry.addDBObjectMapper(new BasicDBObjectToMapMapper(mapperRegistry));
// Enum converters
mapperRegistry.addAppObjectMapper(new EnumToStringMapper());
mapperRegistry.addDBObjectMapper(new StringToEnumMapper());
for (Class<?> type : managedEntityTypes) {
getEntityInfo(type);
mapperRegistry.addAppObjectMapper(new MongoEntityMapper(this, mapperRegistry, type));
mapperRegistry.addDBObjectMapper(new BasicDBObjectMapper(this, mapperRegistry, type));
}
}
protected void dropDatabase() {
this.database.dropDatabase();
logger.info("Database " + this.database.getName() + " dropped in MongoDB");
}
@Override
public void insertEntity(MongoIdentifiableEntity entity, MongoStoreInvocationContext context) {
Class<? extends MongoEntity> clazz = entity.getClass();
// Find annotations for ID, for all the properties and for the name of the collection.
EntityInfo entityInfo = getEntityInfo(clazz);
// Create instance of BasicDBObject and add all declared properties to it (properties with null value probably should be skipped)
BasicDBObject dbObject = mapperRegistry.convertApplicationObjectToDBObject(entity, BasicDBObject.class);
DBCollection dbCollection = database.getCollection(entityInfo.getDbCollectionName());
String currentId = entity.getId();
// Generate random ID if not set already
if (currentId == null) {
currentId = KeycloakModelUtils.generateId();
entity.setId(currentId);
}
// Adding "_id"
dbObject.put("_id", currentId);
try {
dbCollection.insert(dbObject);
} catch (MongoException e) {
throw convertException(e);
}
// Treat object as created in this transaction (It is already submitted to transaction)
context.addCreatedEntity(entity);
}
public static ModelException convertException(MongoException e) {
if (e instanceof DuplicateKeyException) {
return new ModelDuplicateException(e);
} else {
return new ModelException(e);
}
}
@Override
public void updateEntity(final MongoIdentifiableEntity entity, MongoStoreInvocationContext context) {
MongoTask fullUpdateTask = new MongoTask() {
@Override
public void execute() {
Class<? extends MongoEntity> clazz = entity.getClass();
EntityInfo entityInfo = getEntityInfo(clazz);
BasicDBObject dbObject = mapperRegistry.convertApplicationObjectToDBObject(entity, BasicDBObject.class);
DBCollection dbCollection = database.getCollection(entityInfo.getDbCollectionName());
String currentId = entity.getId();
if (currentId == null) {
throw new IllegalStateException("Can't update entity without id: " + entity);
} else {
BasicDBObject query = new BasicDBObject("_id", currentId);
dbCollection.update(query, dbObject);
}
}
@Override
public boolean isFullUpdate() {
return true;
}
};
// update is just added to context and postponed
context.addUpdateTask(entity, fullUpdateTask);
}
@Override
public <T extends MongoIdentifiableEntity> int updateEntities(Class<T> type, DBObject query, DBObject update, MongoStoreInvocationContext context) {
context.beforeDBBulkUpdateOrRemove(type);
DBCollection collection = getDBCollectionForType(type);
WriteResult wr = collection.update(query, update, false, true);
logger.debugf("Updated %d collections of type %s", wr.getN(), type);
return wr.getN();
}
@Override
public <T extends MongoIdentifiableEntity> T loadEntity(Class<T> type, String id, MongoStoreInvocationContext context) {
// First look if we already read the object with this oid and type during this transaction. If yes, use it instead of DB lookup
T cached = context.getLoadedEntity(type, id);
if (cached != null && type.isAssignableFrom(cached.getClass())) return cached;
DBCollection dbCollection = getDBCollectionForType(type);
BasicDBObject idQuery = new BasicDBObject("_id", id);
DBObject dbObject = dbCollection.findOne(idQuery);
if (dbObject == null) return null;
MapperContext<Object, T> mapperContext = new MapperContext<Object, T>(dbObject, type, null);
T converted = mapperRegistry.convertDBObjectToApplicationObject(mapperContext);
// Now add it to loaded objects
context.addLoadedEntity(converted);
return converted;
}
@Override
public <T extends MongoIdentifiableEntity> T loadSingleEntity(Class<T> type, DBObject query, MongoStoreInvocationContext context) {
// First we should execute all pending tasks before searching DB
context.beforeDBSearch(type);
DBCollection dbCollection = getDBCollectionForType(type);
DBObject dbObject = dbCollection.findOne(query);
if (dbObject == null) {
return null;
} else {
return convertDBObjectToEntity(type, dbObject, context);
}
}
@Override
public <T extends MongoIdentifiableEntity> List<T> loadEntities(Class<T> type, DBObject query, MongoStoreInvocationContext context) {
// First we should execute all pending tasks before searching DB
context.beforeDBSearch(type);
DBCollection dbCollection = getDBCollectionForType(type);
DBCursor cursor = dbCollection.find(query);
return convertCursor(type, cursor, context);
}
@Override
public <T extends MongoIdentifiableEntity> List<T> loadEntities(Class<T> type, DBObject query, DBObject sort, int firstResult, int maxResults, MongoStoreInvocationContext context) {
// First we should execute all pending tasks before searching DB
context.beforeDBSearch(type);
DBCollection dbCollection = getDBCollectionForType(type);
DBCursor cursor = dbCollection.find(query);
if (firstResult != -1) {
cursor.skip(firstResult);
}
if (maxResults != -1) {
cursor.limit(maxResults);
}
if (sort != null) {
cursor.sort(sort);
}
return convertCursor(type, cursor, context);
}
public <T extends MongoIdentifiableEntity> int countEntities(Class<T> type, DBObject query, MongoStoreInvocationContext context) {
context.beforeDBSearch(type);
DBCollection dbCollection = getDBCollectionForType(type);
Long count = dbCollection.count(query);
// For now, assume that int is sufficient
return count.intValue();
}
@Override
public boolean removeEntity(MongoIdentifiableEntity entity, MongoStoreInvocationContext context) {
return removeEntity(entity.getClass(), entity.getId(), context);
}
@Override
public boolean removeEntity(Class<? extends MongoIdentifiableEntity> type, String id, MongoStoreInvocationContext context) {
MongoIdentifiableEntity found = loadEntity(type, id, context);
if (found == null) {
return false;
} else {
DBCollection dbCollection = getDBCollectionForType(type);
BasicDBObject dbQuery = new BasicDBObject("_id", id);
dbCollection.remove(dbQuery);
//logger.debugf("Entity of type: %s , id: %s removed from MongoDB.", type, id);
context.addRemovedEntity(found);
return true;
}
}
@Override
public int removeEntities(Class<? extends MongoIdentifiableEntity> type, DBObject query, boolean callback, MongoStoreInvocationContext context) {
if (callback) {
List<? extends MongoIdentifiableEntity> foundObjects = loadEntities(type, query, context);
if (foundObjects.size() == 0) {
return 0;
} else {
DBCollection dbCollection = getDBCollectionForType(type);
dbCollection.remove(query);
logger.debugf("Removed %d entities of type: %s, query: %s", foundObjects.size(), type, query);
for (MongoIdentifiableEntity found : foundObjects) {
context.addRemovedEntity(found);
}
return foundObjects.size();
}
} else {
context.beforeDBBulkUpdateOrRemove(type);
DBCollection dbCollection = getDBCollectionForType(type);
WriteResult writeResult = dbCollection.remove(query);
int removedCount = writeResult.getN();
logger.debugf("Removed directly %d entities of type: %s, query: %s", removedCount, type, query);
return removedCount;
}
}
@Override
public <S> boolean pushItemToList(final MongoIdentifiableEntity entity, final String listPropertyName, S itemToPush, boolean skipIfAlreadyPresent, MongoStoreInvocationContext context) {
final Class<? extends MongoEntity> type = entity.getClass();
EntityInfo entityInfo = getEntityInfo(type);
// Add item to list directly in this object
Property<Object> listProperty = entityInfo.getPropertyByName(listPropertyName);
if (listProperty == null) {
throw new IllegalArgumentException("Property " + listPropertyName + " doesn't exist on object " + entity);
}
List<S> list = (List<S>)listProperty.getValue(entity);
if (list == null) {
list = new ArrayList<S>();
listProperty.setValue(entity, list);
}
// Skip if item is already in list
if (skipIfAlreadyPresent && list.contains(itemToPush)) {
return false;
}
// Update java object
list.add(itemToPush);
// Add update of list to pending tasks
final List<S> updatedList = list;
context.addUpdateTask(entity, new MongoTask() {
@Override
public void execute() {
// Now update the new list in the DB using $set
BasicDBList dbList = mapperRegistry.convertApplicationObjectToDBObject(updatedList, BasicDBList.class);
BasicDBObject query = new BasicDBObject("_id", entity.getId());
BasicDBObject listObject = new BasicDBObject(listPropertyName, dbList);
BasicDBObject setCommand = new BasicDBObject("$set", listObject);
getDBCollectionForType(type).update(query, setCommand);
}
@Override
public boolean isFullUpdate() {
return false;
}
});
return true;
}
@Override
public <S> boolean pullItemFromList(final MongoIdentifiableEntity entity, final String listPropertyName, final S itemToPull, MongoStoreInvocationContext context) {
final Class<? extends MongoEntity> type = entity.getClass();
EntityInfo entityInfo = getEntityInfo(type);
// Remove item from list directly in this object
Property<Object> listProperty = entityInfo.getPropertyByName(listPropertyName);
if (listProperty == null) {
throw new IllegalArgumentException("Property " + listPropertyName + " doesn't exist on object " + entity);
}
List<S> list = (List<S>)listProperty.getValue(entity);
// If list is null, we skip both object and DB update
if (list == null || !list.contains(itemToPull)) {
return false;
} else {
// Update java object
list.remove(itemToPull);
// Add update of list to pending tasks
context.addUpdateTask(entity, new MongoTask() {
@Override
public void execute() {
// Pull item from DB
Object dbItemToPull = mapperRegistry.convertApplicationObjectToDBObject(itemToPull, Object.class);
BasicDBObject query = new BasicDBObject("_id", entity.getId());
BasicDBObject pullObject = new BasicDBObject(listPropertyName, dbItemToPull);
BasicDBObject pullCommand = new BasicDBObject("$pull", pullObject);
getDBCollectionForType(type).update(query, pullCommand);
}
@Override
public boolean isFullUpdate() {
return false;
}
});
return true;
}
}
// Possibility to add user-defined mappers
public void addAppObjectConverter(Mapper<?, ?> mapper) {
mapperRegistry.addAppObjectMapper(mapper);
}
public void addDBObjectConverter(Mapper<?, ?> mapper) {
mapperRegistry.addDBObjectMapper(mapper);
}
public EntityInfo getEntityInfo(Class<?> entityClass) {
EntityInfo entityInfo = entityInfoCache.get(entityClass);
if (entityInfo == null) {
Map<String, Property<Object>> properties = PropertyQueries.createQuery(entityClass).getWritableResultList();
MongoCollection classAnnotation = entityClass.getAnnotation(MongoCollection.class);
String dbCollectionName = classAnnotation==null ? null : classAnnotation.collectionName();
entityInfo = new EntityInfo(entityClass, dbCollectionName, properties);
EntityInfo existing = entityInfoCache.putIfAbsent(entityClass, entityInfo);
if (existing != null) {
entityInfo = existing;
}
}
return entityInfo;
}
protected <T extends MongoIdentifiableEntity> List<T> convertCursor(Class<T> type, DBCursor cursor, MongoStoreInvocationContext context) {
List<T> result = new ArrayList<T>();
try {
for (DBObject dbObject : cursor) {
T entity = convertDBObjectToEntity(type, dbObject, context);
result.add(entity);
}
} finally {
cursor.close();
}
return result;
}
protected <T extends MongoIdentifiableEntity> T convertDBObjectToEntity(Class<T> type, DBObject dbObject, MongoStoreInvocationContext context) {
// First check whether the loaded object is already cached; if so, use the cached instance
String id = dbObject.get("_id").toString();
T object = context.getLoadedEntity(type, id);
if (object == null) {
// So convert and use fresh instance from DB
MapperContext<Object, T> mapperContext = new MapperContext<Object, T>(dbObject, type, null);
object = mapperRegistry.convertDBObjectToApplicationObject(mapperContext);
context.addLoadedEntity(object);
}
return object;
}
protected DBCollection getDBCollectionForType(Class<?> type) {
EntityInfo entityInfo = getEntityInfo(type);
String dbCollectionName = entityInfo.getDbCollectionName();
return dbCollectionName==null ? null : database.getCollection(dbCollectionName);
}
}
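
For orientation, a brief usage sketch of the query API removed above. This is not part of the original sources: the "realmId" and "username" field names and the pre-initialized store and context are assumptions.

// Illustrative sketch only (assumes the removed module's classes are on the classpath).
static <T extends MongoIdentifiableEntity> List<T> findPage(MongoStoreImpl store, MongoStoreInvocationContext context,
                                                            Class<T> type, String realmId, int first, int max) {
    DBObject query = new BasicDBObject("realmId", realmId); // hypothetical field name
    DBObject sort = new BasicDBObject("username", 1);       // hypothetical sort field
    // loadEntities flushes pending update tasks for this type, then applies skip/limit/sort on the cursor
    return store.loadEntities(type, query, sort, first, max, context);
}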

View file

@ -1,85 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.impl.context;
import org.keycloak.connections.mongo.api.MongoIdentifiableEntity;
import org.keycloak.connections.mongo.api.MongoStore;
import org.keycloak.connections.mongo.api.context.MongoStoreInvocationContext;
import org.keycloak.connections.mongo.api.context.MongoTask;
/**
 * Context that does not postpone any tasks and does not cache anything
*
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class SimpleMongoStoreInvocationContext implements MongoStoreInvocationContext {
private final MongoStore mongoStore;
public SimpleMongoStoreInvocationContext(MongoStore mongoStore) {
this.mongoStore = mongoStore;
}
@Override
public void addCreatedEntity(MongoIdentifiableEntity entity) {
}
@Override
public void addLoadedEntity(MongoIdentifiableEntity entity) {
}
@Override
public <T extends MongoIdentifiableEntity> T getLoadedEntity(Class<T> type, String id) {
return null;
}
@Override
public void addUpdateTask(MongoIdentifiableEntity entityToUpdate, MongoTask task) {
task.execute();
}
@Override
public void addRemovedEntity(MongoIdentifiableEntity entity) {
entity.afterRemove(this);
}
@Override
public void beforeDBSearch(Class<? extends MongoIdentifiableEntity> entityType) {
}
@Override
public void beforeDBBulkUpdateOrRemove(Class<? extends MongoIdentifiableEntity> entityType) {
}
@Override
public void begin() {
}
@Override
public void commit() {
}
@Override
public void rollback() {
}
@Override
public MongoStore getMongoStore() {
return mongoStore;
}
}
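
A minimal sketch of what "no postponing" means for callers of this context; mongoStore and entity are placeholders, not taken from the removed sources.

// Sketch: addUpdateTask() runs the task immediately, and commit() has nothing left to flush.
MongoStoreInvocationContext context = new SimpleMongoStoreInvocationContext(mongoStore);
context.addUpdateTask(entity, new MongoTask() {
    @Override
    public void execute() {
        // executed right away, before addUpdateTask() returns
    }
    @Override
    public boolean isFullUpdate() {
        return false;
    }
});
context.commit(); // no-op: nothing was postponed or cached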

View file

@ -1,171 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.impl.context;
import org.keycloak.connections.mongo.api.MongoIdentifiableEntity;
import org.keycloak.connections.mongo.api.MongoStore;
import org.keycloak.connections.mongo.api.context.MongoStoreInvocationContext;
import org.keycloak.connections.mongo.api.context.MongoTask;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
/**
 * Invocation context with some very basic support for transactions, which is able to cache loaded objects.
 * It always executes all pending update tasks before searching for other objects.
 *
 * It is a per-request object (not thread-safe).
*
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class TransactionMongoStoreInvocationContext implements MongoStoreInvocationContext {
// Assumption is that all objects have a unique ID (unique across all types)
private Map<String, MongoIdentifiableEntity> loadedObjects = new HashMap<String, MongoIdentifiableEntity>();
private Map<MongoIdentifiableEntity, Set<MongoTask>> pendingUpdateTasks = new HashMap<MongoIdentifiableEntity, Set<MongoTask>>();
private final MongoStore mongoStore;
public TransactionMongoStoreInvocationContext(MongoStore mongoStore) {
this.mongoStore = mongoStore;
}
@Override
public void addCreatedEntity(MongoIdentifiableEntity entity) {
// For now just add it to list of loaded objects
addLoadedEntity(entity);
}
@Override
public void addLoadedEntity(MongoIdentifiableEntity entity) {
loadedObjects.put(entity.getId(), entity);
}
@Override
public <T extends MongoIdentifiableEntity> T getLoadedEntity(Class<T> type, String id) {
return (T)loadedObjects.get(id);
}
@Override
public void addUpdateTask(MongoIdentifiableEntity entityToUpdate, MongoTask task) {
Set<MongoTask> currentObjectTasks = pendingUpdateTasks.get(entityToUpdate);
if (currentObjectTasks == null) {
currentObjectTasks = new LinkedHashSet<MongoTask>();
pendingUpdateTasks.put(entityToUpdate, currentObjectTasks);
} else {
// if task is full update, then remove all other tasks as we need to do full update of object anyway
if (task.isFullUpdate()) {
currentObjectTasks.clear();
} else {
// If it already contains task for fullUpdate, then we don't need to add ours as we need to do full update of object anyway
for (MongoTask current : currentObjectTasks) {
if (current.isFullUpdate()) {
return;
}
}
}
}
currentObjectTasks.add(task);
}
@Override
public void addRemovedEntity(MongoIdentifiableEntity entity) {
// Remove all pending tasks and object from cache
pendingUpdateTasks.remove(entity);
loadedObjects.remove(entity.getId());
entity.afterRemove(this);
}
@Override
public void beforeDBSearch(Class<? extends MongoIdentifiableEntity> entityType) {
// Execute pending update tasks for the type that is about to be searched
Set<MongoIdentifiableEntity> toRemove = new HashSet<MongoIdentifiableEntity>();
for (MongoIdentifiableEntity currentEntity : pendingUpdateTasks.keySet()) {
if (currentEntity.getClass().equals(entityType)) {
Set<MongoTask> mongoTasks = pendingUpdateTasks.get(currentEntity);
for (MongoTask currentTask : mongoTasks) {
currentTask.execute();
}
toRemove.add(currentEntity);
}
}
// Now remove all done tasks
for (MongoIdentifiableEntity entity : toRemove) {
pendingUpdateTasks.remove(entity);
}
}
@Override
public void beforeDBBulkUpdateOrRemove(Class<? extends MongoIdentifiableEntity> entityType) {
beforeDBSearch(entityType);
Set<String> toRemove = new HashSet<String>();
for (Map.Entry<String, MongoIdentifiableEntity> entry : loadedObjects.entrySet()) {
MongoIdentifiableEntity entity = entry.getValue();
if (entity.getClass().equals(entityType)) {
toRemove.add(entry.getKey());
}
}
// Now remove all loadedObjects
for (String objectId : toRemove) {
loadedObjects.remove(objectId);
}
}
@Override
public void begin() {
loadedObjects.clear();
pendingUpdateTasks.clear();
}
@Override
public void commit() {
// Now execute all pending update tasks
for (Set<MongoTask> mongoTasks : pendingUpdateTasks.values()) {
for (MongoTask currentTask : mongoTasks) {
currentTask.execute();
}
}
// And clear it
loadedObjects.clear();
pendingUpdateTasks.clear();
}
@Override
public void rollback() {
// Just clear the maps without executing the tasks. TODO: Attempt a complete rollback (removal of created objects, restoring of removed objects, rollback of updates)
loadedObjects.clear();
pendingUpdateTasks.clear();
}
@Override
public MongoStore getMongoStore() {
return mongoStore;
}
}
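
A hedged sketch of the per-request lifecycle described in the Javadoc above; mongoStore, entity, partialTask and fullTask are placeholders.

// Sketch: tasks accumulate per entity and are flushed by commit(), or earlier by
// beforeDBSearch(type) when a search runs against the same entity type.
TransactionMongoStoreInvocationContext ctx = new TransactionMongoStoreInvocationContext(mongoStore);
ctx.begin();
ctx.addLoadedEntity(entity);            // cached by id for later getLoadedEntity() lookups
ctx.addUpdateTask(entity, partialTask); // queued in insertion order (LinkedHashSet)
ctx.addUpdateTask(entity, fullTask);    // a full-update task clears previously queued partial tasks
ctx.commit();                           // executes the remaining tasks, then clears the caches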

View file

@ -1,61 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.impl.types;
import com.mongodb.BasicDBList;
import org.keycloak.connections.mongo.api.types.Mapper;
import org.keycloak.connections.mongo.api.types.MapperContext;
import org.keycloak.connections.mongo.api.types.MapperRegistry;
import java.util.ArrayList;
import java.util.List;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class BasicDBListMapper implements Mapper<BasicDBList, List> {
private final MapperRegistry mapperRegistry;
public BasicDBListMapper(MapperRegistry mapperRegistry) {
this.mapperRegistry = mapperRegistry;
}
@Override
public List convertObject(MapperContext<BasicDBList, List> context) {
BasicDBList dbList = context.getObjectToConvert();
ArrayList<Object> appObjects = new ArrayList<Object>();
Class<?> expectedListElementType = (Class<?>) context.getGenericTypes().get(0);
for (Object dbObject : dbList) {
MapperContext<Object, Object> newContext = new MapperContext<Object, Object>(dbObject, expectedListElementType, null);
appObjects.add(mapperRegistry.convertDBObjectToApplicationObject(newContext));
}
return appObjects;
}
@Override
public Class<? extends BasicDBList> getTypeOfObjectToConvert() {
return BasicDBList.class;
}
@Override
public Class<List> getExpectedReturnType() {
return List.class;
}
}

View file

@ -1,61 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.impl.types;
import com.mongodb.BasicDBList;
import org.keycloak.connections.mongo.api.types.Mapper;
import org.keycloak.connections.mongo.api.types.MapperContext;
import org.keycloak.connections.mongo.api.types.MapperRegistry;
import java.util.HashSet;
import java.util.Set;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class BasicDBListToSetMapper implements Mapper<BasicDBList, Set> {
private final MapperRegistry mapperRegistry;
public BasicDBListToSetMapper(MapperRegistry mapperRegistry) {
this.mapperRegistry = mapperRegistry;
}
@Override
public Set convertObject(MapperContext<BasicDBList, Set> context) {
BasicDBList dbList = context.getObjectToConvert();
Set<Object> appObjects = new HashSet<Object>();
Class<?> expectedListElementType = (Class<?>) context.getGenericTypes().get(0);
for (Object dbObject : dbList) {
MapperContext<Object, Object> newContext = new MapperContext<Object, Object>(dbObject, expectedListElementType, null);
appObjects.add(mapperRegistry.convertDBObjectToApplicationObject(newContext));
}
return appObjects;
}
@Override
public Class<? extends BasicDBList> getTypeOfObjectToConvert() {
return BasicDBList.class;
}
@Override
public Class<Set> getExpectedReturnType() {
return Set.class;
}
}

View file

@ -1,137 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.impl.types;
import com.mongodb.BasicDBObject;
import org.jboss.logging.Logger;
import org.keycloak.common.util.reflections.Types;
import org.keycloak.connections.mongo.api.MongoIdentifiableEntity;
import org.keycloak.connections.mongo.api.types.Mapper;
import org.keycloak.connections.mongo.api.types.MapperContext;
import org.keycloak.connections.mongo.api.types.MapperRegistry;
import org.keycloak.connections.mongo.impl.EntityInfo;
import org.keycloak.connections.mongo.impl.MongoStoreImpl;
import org.keycloak.models.utils.reflection.Property;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.Arrays;
import java.util.List;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class BasicDBObjectMapper<S> implements Mapper<BasicDBObject, S> {
private static final Logger logger = Logger.getLogger(BasicDBObjectMapper.class);
private final MongoStoreImpl mongoStoreImpl;
private final MapperRegistry mapperRegistry;
private final Class<S> expectedEntityType;
public BasicDBObjectMapper(MongoStoreImpl mongoStoreImpl, MapperRegistry mapperRegistry, Class<S> expectedEntityType) {
this.mongoStoreImpl = mongoStoreImpl;
this.mapperRegistry = mapperRegistry;
this.expectedEntityType = expectedEntityType;
}
@Override
public S convertObject(MapperContext<BasicDBObject, S> context) {
BasicDBObject dbObject = context.getObjectToConvert();
if (dbObject == null) {
return null;
}
EntityInfo entityInfo = mongoStoreImpl.getEntityInfo(expectedEntityType);
S entity;
try {
entity = expectedEntityType.newInstance();
} catch (Exception e) {
throw new RuntimeException(e);
}
for (String key : dbObject.keySet()) {
Object value = dbObject.get(key);
Property<Object> property;
if ("_id".equals(key)) {
// Current property is "id"
if (entity instanceof MongoIdentifiableEntity) {
((MongoIdentifiableEntity)entity).setId(value.toString());
}
} else if ((property = entityInfo.getPropertyByName(key)) != null) {
// It's a declared property annotated with @DBField
setPropertyValue(entity, value, property);
} else {
// Show warning if it's unknown
logger.warn("Property with key " + key + " not known for type " + expectedEntityType);
}
}
return entity;
}
private void setPropertyValue(Object entity, Object valueFromDB, Property property) {
if (valueFromDB == null) {
property.setValue(entity, null);
return;
}
MapperContext<Object, Object> context;
Type type = property.getBaseType();
// This can be the case when we have a parameterized type (like "List<String>")
if (type instanceof ParameterizedType) {
ParameterizedType parameterized = (ParameterizedType) type;
Type[] genericTypeArguments = parameterized.getActualTypeArguments();
List<Type> genericTypes = Arrays.asList(genericTypeArguments);
Class<?> expectedReturnType = (Class<?>)parameterized.getRawType();
context = new MapperContext<Object, Object>(valueFromDB, expectedReturnType, genericTypes);
} else {
Class<?> expectedReturnType = (Class<?>)type;
// handle primitives
expectedReturnType = Types.boxedClass(expectedReturnType);
context = new MapperContext<Object, Object>(valueFromDB, expectedReturnType, null);
}
Object appObject = mapperRegistry.convertDBObjectToApplicationObject(context);
if (Types.boxedClass(property.getJavaClass()).isAssignableFrom(appObject.getClass())) {
property.setValue(entity, appObject);
} else {
throw new IllegalStateException("Converted object " + appObject + " is not of type " + context.getExpectedReturnType() +
". So can't be assigned as property " + property.getName() + " of " + entity.getClass());
}
}
@Override
public Class<? extends BasicDBObject> getTypeOfObjectToConvert() {
return BasicDBObject.class;
}
@Override
public Class<S> getExpectedReturnType() {
return expectedEntityType;
}
}
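
An illustrative conversion in the direction handled above, from a raw document to a typed entity. MongoUserEntity is a hypothetical MongoIdentifiableEntity with an "enabled" property; mongoStoreImpl and mapperRegistry are assumed to be wired up already.

// Sketch (hypothetical entity type and field): "_id" becomes the entity id,
// the remaining keys are matched against the entity's declared properties.
BasicDBObject doc = new BasicDBObject("_id", "abc-123").append("enabled", true);
BasicDBObjectMapper<MongoUserEntity> mapper = new BasicDBObjectMapper<>(mongoStoreImpl, mapperRegistry, MongoUserEntity.class);
MongoUserEntity user = mapper.convertObject(new MapperContext<BasicDBObject, MongoUserEntity>(doc, MongoUserEntity.class, null));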

View file

@ -1,94 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.impl.types;
import com.mongodb.BasicDBObject;
import org.keycloak.connections.mongo.api.types.Mapper;
import org.keycloak.connections.mongo.api.types.MapperContext;
import org.keycloak.connections.mongo.api.types.MapperRegistry;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
/**
*
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class BasicDBObjectToMapMapper implements Mapper<BasicDBObject, Map> {
private final MapperRegistry mapperRegistry;
public BasicDBObjectToMapMapper(MapperRegistry mapperRegistry) {
this.mapperRegistry = mapperRegistry;
}
@Override
public Map convertObject(MapperContext<BasicDBObject, Map> context) {
BasicDBObject dbObjectToConvert = context.getObjectToConvert();
Type expectedElementValueType = context.getGenericTypes().get(1);
HashMap<String, Object> result = new HashMap<String, Object>();
for (Map.Entry<String, Object> entry : dbObjectToConvert.entrySet()) {
String key = entry.getKey();
Object dbValue = entry.getValue();
// Workaround, as numbers inserted into Mongo manually may be treated as "Double"
if (dbValue instanceof Double && expectedElementValueType == Integer.class) {
dbValue = ((Double)dbValue).intValue();
}
MapperContext<Object, Object> newContext = getMapperContext(dbValue, expectedElementValueType);
Object value = mapperRegistry.convertDBObjectToApplicationObject(newContext);
if (key.contains(MapMapper.DOT_PLACEHOLDER)) {
key = key.replaceAll(MapMapper.DOT_PLACEHOLDER, ".");
}
result.put(key, value);
}
return result;
}
@Override
public Class<? extends BasicDBObject> getTypeOfObjectToConvert() {
return BasicDBObject.class;
}
@Override
public Class<Map> getExpectedReturnType() {
return Map.class;
}
private MapperContext<Object, Object> getMapperContext(Object dbValue, Type expectedElementValueType) {
if (expectedElementValueType instanceof Class) {
Class<?> clazz = (Class<?>) expectedElementValueType;
return new MapperContext<>(dbValue, clazz, null);
} else if (expectedElementValueType instanceof ParameterizedType) {
ParameterizedType parameterized = (ParameterizedType) expectedElementValueType;
Class<?> expectedClazz = (Class<?>) parameterized.getRawType();
Type[] generics = parameterized.getActualTypeArguments();
return new MapperContext<>(dbValue, expectedClazz, Arrays.asList(generics));
} else {
throw new IllegalArgumentException("Unexpected type: '" + expectedElementValueType + "' for converting " + dbValue);
}
}
}

View file

@ -1,44 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.impl.types;
import org.keycloak.connections.mongo.api.types.Mapper;
import org.keycloak.connections.mongo.api.types.MapperContext;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class EnumToStringMapper implements Mapper<Enum, String> {
@Override
public String convertObject(MapperContext<Enum, String> context) {
Enum objectToConvert = context.getObjectToConvert();
return objectToConvert.toString();
}
@Override
public Class<? extends Enum> getTypeOfObjectToConvert() {
return Enum.class;
}
@Override
public Class<String> getExpectedReturnType() {
return String.class;
}
}

View file

@ -1,62 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.impl.types;
import com.mongodb.BasicDBList;
import org.keycloak.connections.mongo.api.types.Mapper;
import org.keycloak.connections.mongo.api.types.MapperContext;
import org.keycloak.connections.mongo.api.types.MapperRegistry;
import java.util.Collection;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class ListMapper<T extends Collection> implements Mapper<T, BasicDBList> {
private final MapperRegistry mapperRegistry;
private final Class<T> listType;
public ListMapper(MapperRegistry mapperRegistry, Class<T> listType) {
this.mapperRegistry = mapperRegistry;
this.listType = listType;
}
@Override
public BasicDBList convertObject(MapperContext<T, BasicDBList> context) {
T appObjectsList = context.getObjectToConvert();
BasicDBList dbObjects = new BasicDBList();
for (Object appObject : appObjectsList) {
Object dbObject = mapperRegistry.convertApplicationObjectToDBObject(appObject, Object.class);
dbObjects.add(dbObject);
}
return dbObjects;
}
@Override
public Class<? extends T> getTypeOfObjectToConvert() {
return listType;
}
@Override
public Class<BasicDBList> getExpectedReturnType() {
return BasicDBList.class;
}
}

View file

@ -1,79 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.impl.types;
import com.mongodb.BasicDBObject;
import org.keycloak.connections.mongo.api.types.Mapper;
import org.keycloak.connections.mongo.api.types.MapperContext;
import org.keycloak.connections.mongo.api.types.MapperRegistry;
import java.util.Map;
import java.util.Set;
/**
 * For now, we only support converting from Map<String, simpleType>
*
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class MapMapper<T extends Map> implements Mapper<T, BasicDBObject> {
// Simple placeholder encoding for the '.' character, which Mongo does not allow in key fields
static final String DOT_PLACEHOLDER = "###";
private final MapperRegistry mapperRegistry;
private final Class<T> mapType;
public MapMapper(MapperRegistry mapperRegistry, Class<T> mapType) {
this.mapperRegistry = mapperRegistry;
this.mapType = mapType;
}
@Override
public BasicDBObject convertObject(MapperContext<T, BasicDBObject> context) {
T mapToConvert = context.getObjectToConvert();
return convertMap(mapToConvert, mapperRegistry);
}
public static BasicDBObject convertMap(Map mapToConvert, MapperRegistry mapperRegistry) {
BasicDBObject dbObject = new BasicDBObject();
Set<Map.Entry> entries = mapToConvert.entrySet();
for (Map.Entry entry : entries) {
String key = (String)entry.getKey();
Object value = entry.getValue();
Object dbValue = mapperRegistry==null ? entry.getValue() : mapperRegistry.convertApplicationObjectToDBObject(value, Object.class);
if (key.contains(".")) {
key = key.replaceAll("\\.", DOT_PLACEHOLDER);
}
dbObject.put(key, dbValue);
}
return dbObject;
}
@Override
public Class<? extends T> getTypeOfObjectToConvert() {
return mapType;
}
@Override
public Class<BasicDBObject> getExpectedReturnType() {
return BasicDBObject.class;
}
}
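
A tiny illustration of the '.'-key escaping performed above; the map content is arbitrary.

// Sketch: keys containing '.' are stored with the "###" placeholder; BasicDBObjectToMapMapper
// (shown earlier in this commit) reverses the substitution when the map is read back.
Map<String, String> smtpConfig = new HashMap<>();
smtpConfig.put("smtp.host", "localhost");
BasicDBObject dbObject = MapMapper.convertMap(smtpConfig, null); // stored key: "smtp###host"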

View file

@ -1,80 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.impl.types;
import com.mongodb.BasicDBObject;
import org.keycloak.connections.mongo.api.MongoIdentifiableEntity;
import org.keycloak.connections.mongo.api.types.Mapper;
import org.keycloak.connections.mongo.api.types.MapperContext;
import org.keycloak.connections.mongo.api.types.MapperRegistry;
import org.keycloak.connections.mongo.impl.EntityInfo;
import org.keycloak.connections.mongo.impl.MongoStoreImpl;
import org.keycloak.models.utils.reflection.Property;
import java.util.Collection;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class MongoEntityMapper<T> implements Mapper<T, BasicDBObject> {
private final MongoStoreImpl mongoStoreImpl;
private final MapperRegistry mapperRegistry;
private final Class<T> expectedMongoEntityType;
public MongoEntityMapper(MongoStoreImpl mongoStoreImpl, MapperRegistry mapperRegistry, Class<T> expectedMongoEntityType) {
this.mongoStoreImpl = mongoStoreImpl;
this.mapperRegistry = mapperRegistry;
this.expectedMongoEntityType = expectedMongoEntityType;
}
@Override
public BasicDBObject convertObject(MapperContext<T, BasicDBObject> context) {
T applicationObject = context.getObjectToConvert();
EntityInfo entityInfo = mongoStoreImpl.getEntityInfo(applicationObject.getClass());
// Create instance of BasicDBObject and add all declared properties to it
BasicDBObject dbObject = new BasicDBObject();
Collection<Property<Object>> props = entityInfo.getProperties();
for (Property<Object> property : props) {
String propName = property.getName();
// Ignore "id" property
if (!"id".equals(propName) || !(applicationObject instanceof MongoIdentifiableEntity)) {
Object propValue = property.getValue(applicationObject);
if (propValue != null) {
Object dbValue = mapperRegistry.convertApplicationObjectToDBObject(propValue, Object.class);
dbObject.put(propName, dbValue);
}
}
}
return dbObject;
}
@Override
public Class<? extends T> getTypeOfObjectToConvert() {
return expectedMongoEntityType;
}
@Override
public Class<BasicDBObject> getExpectedReturnType() {
return BasicDBObject.class;
}
}

View file

@ -1,51 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.impl.types;
import org.keycloak.connections.mongo.api.types.Mapper;
import org.keycloak.connections.mongo.api.types.MapperContext;
/**
* Just returns input
*
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class SimpleMapper<T> implements Mapper<T, T> {
private final Class<T> expectedType;
public SimpleMapper(Class<T> expectedType) {
this.expectedType = expectedType;
}
@Override
public T convertObject(MapperContext<T, T> context) {
T objectToConvert = context.getObjectToConvert();
return objectToConvert;
}
@Override
public Class<? extends T> getTypeOfObjectToConvert() {
return expectedType;
}
@Override
public Class<T> getExpectedReturnType() {
return expectedType;
}
}

View file

@ -1,45 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.impl.types;
import org.keycloak.connections.mongo.api.types.Mapper;
import org.keycloak.connections.mongo.api.types.MapperContext;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class StringToEnumMapper implements Mapper<String, Enum> {
@Override
public Enum convertObject(MapperContext<String, Enum> context) {
String enumValue = context.getObjectToConvert();
Class<? extends Enum> clazz = context.getExpectedReturnType();
return Enum.valueOf(clazz, enumValue);
}
@Override
public Class<? extends String> getTypeOfObjectToConvert() {
return String.class;
}
@Override
public Class<Enum> getExpectedReturnType() {
return Enum.class;
}
}

View file

@ -1,149 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.lock;
import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.DBCursor;
import com.mongodb.DBObject;
import com.mongodb.DuplicateKeyException;
import com.mongodb.WriteResult;
import org.jboss.logging.Logger;
import org.keycloak.common.util.HostUtils;
import org.keycloak.common.util.Time;
import org.keycloak.models.dblock.DBLockProvider;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class MongoDBLockProvider implements DBLockProvider {
private static final String DB_LOCK_COLLECTION = "dblock";
private static final Logger logger = Logger.getLogger(MongoDBLockProvider.class);
private final MongoDBLockProviderFactory factory;
private final DB db;
public MongoDBLockProvider(MongoDBLockProviderFactory factory, DB db) {
this.factory = factory;
this.db = db;
}
@Override
public void waitForLock() {
boolean locked = false;
long startTime = Time.toMillis(Time.currentTime());
long timeToGiveUp = startTime + (factory.getLockWaitTimeoutMillis());
while (!locked && Time.toMillis(Time.currentTime()) < timeToGiveUp) {
locked = acquireLock();
if (!locked) {
int remainingTime = ((int)(timeToGiveUp / 1000)) - Time.currentTime();
logger.debugf("Waiting for changelog lock... Remaining time: %d seconds", remainingTime);
try {
Thread.sleep(factory.getLockRecheckTimeMillis());
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
if (!locked) {
DBObject query = new BasicDBObject("_id", 1);
DBCursor cursor = db.getCollection(DB_LOCK_COLLECTION).find(query);
String lockedBy;
if (cursor.hasNext()) {
DBObject dbObj = cursor.next();
lockedBy = dbObj.get("lockedBy") + " since " + Time.toDate(((int)((long) dbObj.get("lockedSince") / 1000)));
} else {
lockedBy = "UNKNOWN";
}
throw new IllegalStateException("Could not acquire change log lock. Currently locked by " + lockedBy);
}
}
private boolean acquireLock() {
DBObject query = new BasicDBObject("locked", false);
BasicDBObject update = new BasicDBObject("locked", true);
update.append("_id", 1);
update.append("lockedSince", Time.toMillis(Time.currentTime()));
update.append("lockedBy", HostUtils.getHostName()); // Maybe replace with something better, but doesn't matter for now
try {
WriteResult wr = db.getCollection(DB_LOCK_COLLECTION).update(query, update, true, false);
if (wr.getN() == 1) {
logger.debugf("Successfully acquired DB lock");
factory.setHasLock(true);
return true;
} else {
return false;
}
} catch (DuplicateKeyException dke) {
logger.debugf("Failed acquire lock. Reason: %s", dke.getMessage());
}
return false;
}
@Override
public void releaseLock() {
DBObject query = new BasicDBObject("locked", true);
BasicDBObject update = new BasicDBObject("locked", false);
update.append("_id", 1);
update.append("lockedBy", null);
update.append("lockedSince", null);
try {
WriteResult wr = db.getCollection(DB_LOCK_COLLECTION).update(query, update, true, false);
if (wr.getN() > 0) {
factory.setHasLock(false);
logger.debugf("Successfully released DB lock");
} else {
logger.warnf("Attempt to release DB lock, but nothing was released");
}
} catch (DuplicateKeyException dke) {
logger.debugf("Failed release lock. Reason: %s", dke.getMessage());
}
}
@Override
public boolean hasLock() {
return factory.hasLock();
}
@Override
public boolean supportsForcedUnlock() {
return true;
}
@Override
public void destroyLockInfo() {
db.getCollection(DB_LOCK_COLLECTION).remove(new BasicDBObject());
logger.debugf("Destroyed lock collection");
}
@Override
public void close() {
}
}
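
A short sketch of the acquire/release pattern this provider implements; factory and db stand for an already configured MongoDBLockProviderFactory and DB instance.

// Sketch: guard database updates with the Mongo-backed DB lock (factory/db wiring assumed).
DBLockProvider lock = new MongoDBLockProvider(factory, db);
lock.waitForLock();        // upserts {_id: 1, locked: true}; retries until the configured timeout
try {
    // run schema updates while holding the lock
} finally {
    lock.releaseLock();    // sets locked back to false and clears lockedBy/lockedSince
}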

View file

@ -1,98 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.lock;
import com.mongodb.DB;
import org.jboss.logging.Logger;
import org.keycloak.Config;
import org.keycloak.common.util.Time;
import org.keycloak.connections.mongo.MongoConnectionProvider;
import org.keycloak.connections.mongo.MongoConnectionProviderFactory;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.KeycloakSessionFactory;
import org.keycloak.models.dblock.DBLockProviderFactory;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class MongoDBLockProviderFactory implements DBLockProviderFactory {
private static final Logger logger = Logger.getLogger(MongoDBLockProviderFactory.class);
private long lockRecheckTimeMillis;
private long lockWaitTimeoutMillis;
// True if this node has a lock acquired
private AtomicBoolean hasLock = new AtomicBoolean(false);
protected long getLockRecheckTimeMillis() {
return lockRecheckTimeMillis;
}
protected long getLockWaitTimeoutMillis() {
return lockWaitTimeoutMillis;
}
@Override
public void init(Config.Scope config) {
int lockRecheckTime = config.getInt("lockRecheckTime", 2);
int lockWaitTimeout = config.getInt("lockWaitTimeout", 900);
this.lockRecheckTimeMillis = Time.toMillis(lockRecheckTime);
this.lockWaitTimeoutMillis = Time.toMillis(lockWaitTimeout);
logger.debugf("Mongo lock provider configured with lockWaitTime: %d seconds, lockRecheckTime: %d seconds", lockWaitTimeout, lockRecheckTime);
}
@Override
public void postInit(KeycloakSessionFactory factory) {
}
@Override
public MongoDBLockProvider create(KeycloakSession session) {
MongoConnectionProviderFactory mongoConnectionFactory = (MongoConnectionProviderFactory) session.getKeycloakSessionFactory().getProviderFactory(MongoConnectionProvider.class);
DB db = mongoConnectionFactory.getDBBeforeUpdate();
return new MongoDBLockProvider(this, db);
}
@Override
public void setTimeouts(long lockRecheckTimeMillis, long lockWaitTimeoutMillis) {
this.lockRecheckTimeMillis = lockRecheckTimeMillis;
this.lockWaitTimeoutMillis = lockWaitTimeoutMillis;
}
@Override
public void close() {
}
@Override
public String getId() {
return "mongo";
}
public boolean hasLock() {
return hasLock.get();
}
public void setHasLock(boolean hasLock) {
this.hasLock.set(hasLock);
}
}

View file

@ -1,33 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.updater;
import com.mongodb.DB;
import org.keycloak.models.KeycloakSession;
import org.keycloak.provider.Provider;
/**
* @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a>
*/
public interface MongoUpdaterProvider extends Provider {
void update(KeycloakSession session, DB db);
void validate(KeycloakSession session, DB db);
}

View file

@ -1,26 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.updater;
import org.keycloak.provider.ProviderFactory;
/**
* @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a>
*/
public interface MongoUpdaterProviderFactory extends ProviderFactory<MongoUpdaterProvider> {
}

View file

@ -1,49 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.updater;
import org.keycloak.provider.Provider;
import org.keycloak.provider.ProviderFactory;
import org.keycloak.provider.Spi;
/**
* @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a>
*/
public class MongoUpdaterSpi implements Spi {
@Override
public boolean isInternal() {
return true;
}
@Override
public String getName() {
return "connectionsMongoUpdater";
}
@Override
public Class<? extends Provider> getProviderClass() {
return MongoUpdaterProvider.class;
}
@Override
public Class<? extends ProviderFactory> getProviderFactoryClass() {
return MongoUpdaterProviderFactory.class;
}
}

View file

@ -1,178 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.updater.impl;
import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import org.jboss.logging.Logger;
import org.keycloak.connections.mongo.updater.MongoUpdaterProvider;
import org.keycloak.connections.mongo.updater.impl.updates.Update;
import org.keycloak.connections.mongo.updater.impl.updates.Update1_0_0_Final;
import org.keycloak.connections.mongo.updater.impl.updates.Update1_1_0_Beta1;
import org.keycloak.connections.mongo.updater.impl.updates.Update1_2_0_Beta1;
import org.keycloak.connections.mongo.updater.impl.updates.Update1_2_0_CR1;
import org.keycloak.connections.mongo.updater.impl.updates.Update1_3_0;
import org.keycloak.connections.mongo.updater.impl.updates.Update1_4_0;
import org.keycloak.connections.mongo.updater.impl.updates.Update1_7_0;
import org.keycloak.connections.mongo.updater.impl.updates.Update1_8_0;
import org.keycloak.connections.mongo.updater.impl.updates.Update1_9_2;
import org.keycloak.connections.mongo.updater.impl.updates.Update2_3_0;
import org.keycloak.connections.mongo.updater.impl.updates.Update2_4_0;
import org.keycloak.connections.mongo.updater.impl.updates.Update2_5_0;
import org.keycloak.models.KeycloakSession;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
/**
* @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a>
*/
public class DefaultMongoUpdaterProvider implements MongoUpdaterProvider {
public static final Logger log = Logger.getLogger(DefaultMongoUpdaterProvider.class);
public static final String CHANGE_LOG_COLLECTION = "databaseChangeLog";
private Class<? extends Update>[] updates = new Class[]{
Update1_0_0_Final.class,
Update1_1_0_Beta1.class,
Update1_2_0_Beta1.class,
Update1_2_0_CR1.class,
Update1_3_0.class,
Update1_4_0.class,
Update1_7_0.class,
Update1_8_0.class,
Update1_9_2.class,
Update2_3_0.class,
Update2_4_0.class,
Update2_5_0.class
};
@Override
public void update(KeycloakSession session, DB db) {
log.debug("Starting database update");
try {
boolean changeLogExists = db.collectionExists(CHANGE_LOG_COLLECTION);
DBCollection changeLog = db.getCollection(CHANGE_LOG_COLLECTION);
List<String> executed = getExecuted(db, changeLogExists, changeLog);
List<Update> updatesToRun = getUpdatesToRun(executed);
if (!updatesToRun.isEmpty()) {
if (executed.isEmpty()) {
log.info("Initializing database schema");
} else {
if (log.isDebugEnabled()) {
log.debugv("Updating database from {0} to {1}", executed.get(executed.size() - 1), updatesToRun.get(updatesToRun.size() - 1).getId());
} else {
log.info("Updating database");
}
}
int order = executed.size();
for (Update u : updatesToRun) {
log.debugv("Executing updates for {0}", u.getId());
u.setLog(log);
u.setDb(db);
u.update(session);
createLog(changeLog, u, ++order);
log.debugv("Completed updates for {0}", u.getId());
}
log.debug("Completed database update");
} else {
log.debug("Skip database update. Database is already up to date");
}
} catch (Exception e) {
throw new RuntimeException("Failed to update database", e);
}
}
@Override
public void validate(KeycloakSession session, DB db) {
log.debug("Validating database");
boolean changeLogExists = db.collectionExists(CHANGE_LOG_COLLECTION);
DBCollection changeLog = db.getCollection(CHANGE_LOG_COLLECTION);
List<String> executed = getExecuted(db, changeLogExists, changeLog);
List<Update> updatesToRun = getUpdatesToRun(executed);
if (!updatesToRun.isEmpty()) {
String errorMessage = (executed.isEmpty())
? "Failed to validate Mongo database schema. Database is empty. Please change databaseSchema to 'update'"
: String.format("Failed to validate Mongo database schema. Schema needs updating database from %s to %s. Please change databaseSchema to 'update'",
executed.get(executed.size() - 1), updatesToRun.get(updatesToRun.size() - 1).getId());
throw new RuntimeException(errorMessage);
} else {
log.debug("Validation passed. Database is up to date");
}
}
private List<String> getExecuted(DB db, boolean changeLogExists, DBCollection changeLog) {
boolean realmExists = db.collectionExists("realms");
List<String> executed = new LinkedList<>();
if (!changeLogExists && realmExists) {
Update1_0_0_Final u = new Update1_0_0_Final();
executed.add(u.getId());
createLog(changeLog, u, 1);
} else if (changeLogExists) {
DBCursor cursor = changeLog.find().sort(new BasicDBObject("orderExecuted", 1));
while (cursor.hasNext()) {
executed.add((String) cursor.next().get("_id"));
}
}
return executed;
}
private List<Update> getUpdatesToRun(List<String> executed) {
try {
List<Update> updatesToRun = new LinkedList<>();
for (Class<? extends Update> updateClass : updates) {
Update u = updateClass.newInstance();
if (!executed.contains(u.getId())) {
updatesToRun.add(u);
}
}
return updatesToRun;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
private void createLog(DBCollection changeLog, Update update, int orderExecuted) {
changeLog.insert(new BasicDBObject("_id", update.getId()).append("dateExecuted", new Date()).append("orderExecuted", orderExecuted));
}
@Override
public void close() {
}
}

View file

@ -1,53 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.updater.impl;
import org.keycloak.Config;
import org.keycloak.connections.mongo.updater.MongoUpdaterProvider;
import org.keycloak.connections.mongo.updater.MongoUpdaterProviderFactory;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.KeycloakSessionFactory;
/**
* @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a>
*/
public class DefaultMongoUpdaterProviderFactory implements MongoUpdaterProviderFactory {
@Override
public MongoUpdaterProvider create(KeycloakSession session) {
return new DefaultMongoUpdaterProvider();
}
@Override
public void init(Config.Scope config) {
}
@Override
public void postInit(KeycloakSessionFactory factory) {
}
@Override
public void close() {
}
@Override
public String getId() {
return "default";
}
}

View file

@ -1,167 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.updater.impl.updates;
import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import org.jboss.logging.Logger;
import org.keycloak.storage.UserStorageProvider;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
/**
* @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
* @version $Revision: 1 $
*/
public abstract class AbstractMigrateUserFedToComponent extends Update {
private final Logger logger = Logger.getLogger(getClass());
public void portUserFedToComponent(String providerId) {
DBCollection realms = db.getCollection("realms");
DBCursor cursor = realms.find();
while (cursor.hasNext()) {
BasicDBObject realm = (BasicDBObject) cursor.next();
String realmId = realm.getString("_id");
Set<String> removedProviders = new HashSet<>();
BasicDBList componentEntities = (BasicDBList) realm.get("componentEntities");
BasicDBList federationProviders = (BasicDBList) realm.get("userFederationProviders");
for (Object obj : federationProviders) {
BasicDBObject fedProvider = (BasicDBObject)obj;
if (fedProvider.getString("providerName").equals(providerId)) {
String id = fedProvider.getString("id");
removedProviders.add(id);
int priority = fedProvider.getInt("priority");
String displayName = fedProvider.getString("displayName");
int fullSyncPeriod = fedProvider.getInt("fullSyncPeriod");
int changedSyncPeriod = fedProvider.getInt("changedSyncPeriod");
int lastSync = fedProvider.getInt("lastSync");
BasicDBObject component = new BasicDBObject();
component.put("id", id);
component.put("name", displayName);
component.put("providerType", UserStorageProvider.class.getName());
component.put("providerId", providerId);
component.put("parentId", realmId);
BasicDBObject config = new BasicDBObject();
config.put("priority", Collections.singletonList(Integer.toString(priority)));
config.put("fullSyncPeriod", Collections.singletonList(Integer.toString(fullSyncPeriod)));
config.put("changedSyncPeriod", Collections.singletonList(Integer.toString(changedSyncPeriod)));
config.put("lastSync", Collections.singletonList(Integer.toString(lastSync)));
BasicDBObject fedConfig = (BasicDBObject)fedProvider.get("config");
if (fedConfig != null) {
for (Map.Entry<String, Object> attr : new HashSet<>(fedConfig.entrySet())) {
String attrName = attr.getKey();
String attrValue = attr.getValue().toString();
config.put(attrName, Collections.singletonList(attrValue));
}
}
component.put("config", config);
componentEntities.add(component);
}
}
Iterator<Object> it = federationProviders.iterator();
while (it.hasNext()) {
BasicDBObject fedProvider = (BasicDBObject)it.next();
String id = fedProvider.getString("id");
if (removedProviders.contains(id)) {
it.remove();
}
}
realms.update(new BasicDBObject().append("_id", realmId), realm);
}
}
public void portUserFedMappersToComponent(String providerId, String mapperType) {
//logger.info("*** port mappers");
DBCollection realms = db.getCollection("realms");
DBCursor cursor = realms.find();
while (cursor.hasNext()) {
BasicDBObject realm = (BasicDBObject) cursor.next();
String realmId = realm.getString("_id");
Set<String> removedProviders = new HashSet<>();
BasicDBList componentEntities = (BasicDBList) realm.get("componentEntities");
BasicDBList federationProviders = (BasicDBList) realm.get("userFederationProviders");
BasicDBList fedMappers = (BasicDBList) realm.get("userFederationMappers");
for (Object obj : federationProviders) {
BasicDBObject fedProvider = (BasicDBObject)obj;
String providerName = fedProvider.getString("providerName");
//logger.info("looking for mappers of fed provider: " + providerName);
if (providerName.equals(providerId)) {
String id = fedProvider.getString("id");
//logger.info("found fed provider: " + id + ", looking at mappers");
for (Object obj2 : fedMappers) {
BasicDBObject fedMapper = (BasicDBObject)obj2;
String federationProviderId = fedMapper.getString("federationProviderId");
//logger.info("looking at mapper with federationProviderId: " + federationProviderId);
if (federationProviderId.equals(id)) {
String name = fedMapper.getString("name");
String mapperId = fedMapper.getString("id");
removedProviders.add(mapperId);
String mapperProviderId = fedMapper.getString("federationMapperType");
BasicDBObject component = new BasicDBObject();
component.put("id", mapperId);
component.put("name", name);
component.put("providerType", mapperType);
component.put("providerId", mapperProviderId);
component.put("parentId", id);
BasicDBObject fedConfig = (BasicDBObject)fedMapper.get("config");
BasicDBObject config = new BasicDBObject();
if (fedConfig != null) {
for (Map.Entry<String, Object> attr : new HashSet<>(fedConfig.entrySet())) {
String attrName = attr.getKey();
String attrValue = attr.getValue().toString();
config.put(attrName, Collections.singletonList(attrValue));
}
}
component.put("config", config);
componentEntities.add(component);
}
}
}
}
Iterator<Object> it = fedMappers.iterator();
while (it.hasNext()) {
BasicDBObject fedMapper = (BasicDBObject)it.next();
String id = fedMapper.getString("id");
if (removedProviders.contains(id)) {
it.remove();
}
}
realms.update(new BasicDBObject().append("_id", realmId), realm);
}
}
}

View file

@ -1,88 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.updater.impl.updates;
import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import org.jboss.logging.Logger;
import org.keycloak.models.KeycloakSession;
import java.util.Arrays;
/**
* @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a>
*/
public abstract class Update {
protected DB db;
protected Logger log;
public abstract String getId();
public abstract void update(KeycloakSession session) throws ClassNotFoundException;
protected DBCollection createCollection(String name) {
if (db.collectionExists(name)) {
throw new RuntimeException("Failed to create collection {0}: collection already exists");
}
DBCollection col = db.getCollection(name);
log.debugv("Created collection {0}", name);
return col;
}
protected void ensureIndex(String name, String field, boolean unique, boolean sparse) {
ensureIndex(name, new String[]{field}, unique, sparse);
}
protected void ensureIndex(String name, String[] fields, boolean unique, boolean sparse) {
DBCollection col = db.getCollection(name);
BasicDBObject o = new BasicDBObject();
for (String f : fields) {
o.append(f, 1);
}
col.createIndex(o, new BasicDBObject("unique", unique).append("sparse", sparse));
log.debugv("Created index {0}, fields={1}, unique={2}, sparse={3}", name, Arrays.toString(fields), unique, sparse);
}
protected void deleteEntries(String collection) {
db.getCollection(collection).remove(new BasicDBObject());
log.debugv("Deleted entries from {0}", collection);
}
protected void removeField(String collection, String field) {
db.getCollection(collection).update(new BasicDBObject(), new BasicDBObject("$unset" , new BasicDBObject(field, 1)), false, true);
}
protected void renameCollection(String collection, String newName) {
db.getCollection(collection).rename(newName);
}
public void setLog(Logger log) {
this.log = log;
}
public void setDb(DB db) {
this.db = db;
}
}

View file

@ -1,62 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.updater.impl.updates;
import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;
import org.keycloak.connections.mongo.updater.impl.DefaultMongoUpdaterProvider;
import org.keycloak.models.KeycloakSession;
/**
* @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a>
*/
public class Update1_0_0_Final extends Update {
@Override
public String getId() {
return "1.0.0.Final";
}
@Override
public void update(KeycloakSession session) throws ClassNotFoundException {
DBCollection realmsCollection = db.getCollection("realms");
realmsCollection.createIndex(new BasicDBObject("name", 1), new BasicDBObject("unique", true));
DefaultMongoUpdaterProvider.log.debugv("Created collection {0}", "realms");
createCollection("users");
ensureIndex("users", new String[] { "realmId", "username"}, true, false);
ensureIndex("users", "emailIndex", true, true);
createCollection("roles");
ensureIndex("roles", "nameIndex", true, false);
createCollection("applications");
ensureIndex("applications", new String[]{"realmId", "name"}, true, false);
createCollection("oauthClients");
ensureIndex("oauthClients", new String[] { "realmId", "name"}, true, false);
createCollection("userFailures");
createCollection("sessions");
createCollection("clientSessions");
}
}

View file

@ -1,61 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.updater.impl.updates;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import com.mongodb.DBObject;
import com.mongodb.QueryBuilder;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.utils.KeycloakModelUtils;
/**
* @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a>
*/
public class Update1_1_0_Beta1 extends Update {
@Override
public String getId() {
return "1.1.0.Beta1";
}
@Override
public void update(KeycloakSession session) {
deleteEntries("clientSessions");
deleteEntries("sessions");
addRealmCodeSecret();
}
private void addRealmCodeSecret() {
DBCollection realms = db.getCollection("realms");
DBObject query = new QueryBuilder()
.and("codeSecret").is(null).get();
DBCursor objects = realms.find(query);
while (objects.hasNext()) {
DBObject object = objects.next();
object.put("codeSecret", KeycloakModelUtils.generateCodeSecret());
realms.save(object);
log.debugv("Added realm.codeSecret, id={0}", object.get("id"));
}
}
}

View file

@ -1,297 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.updater.impl.updates;
import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.BasicDBObjectBuilder;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import com.mongodb.DBObject;
import org.keycloak.Config;
import org.keycloak.connections.mongo.impl.types.MapMapper;
import org.keycloak.migration.MigrationProvider;
import org.keycloak.models.AdminRoles;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.utils.KeycloakModelUtils;
import org.keycloak.representations.idm.ProtocolMapperRepresentation;
import java.util.List;
import java.util.Map;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class Update1_2_0_Beta1 extends Update {
@Override
public String getId() {
return "1.2.0.Beta1";
}
@Override
public void update(KeycloakSession session) {
deleteEntries("clientSessions");
deleteEntries("sessions");
convertSocialToIdFedRealms();
convertSocialToIdFedUsers();
addAccessCodeLoginTimeout();
addNewAdminRoles();
addDefaultProtocolMappers(session);
}
private void convertSocialToIdFedRealms() {
DBCollection realms = db.getCollection("realms");
DBCursor realmsCursor = realms.find();
try {
while (realmsCursor.hasNext()) {
BasicDBObject realm = (BasicDBObject) realmsCursor.next();
boolean updateProfileOnInitialSocialLogin = realm.getBoolean("updateProfileOnInitialSocialLogin");
BasicDBObject socialConfig = (BasicDBObject) realm.get("socialConfig");
BasicDBList identityProviders = (BasicDBList) realm.get("identityProviders");
if (identityProviders == null) {
identityProviders = new BasicDBList();
realm.put("identityProviders", identityProviders);
}
if (socialConfig != null) {
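// The legacy social config stores each provider as "<provider>###key" / "<provider>###secret"
// pairs holding the client id and client secret.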
for (Map.Entry<String, Object> entry : socialConfig.entrySet()) {
if (entry.getKey().endsWith("###key")) {
String socialProviderId = entry.getKey().substring(0, entry.getKey().indexOf("###"));
String clientId = (String) entry.getValue();
String clientSecret = socialConfig.getString(socialProviderId + "###secret");
DBObject identityProviderConfig = new BasicDBObjectBuilder()
.add("clientId", clientId)
.add("clientSecret", clientSecret).get();
DBObject identityProvider = new BasicDBObjectBuilder()
.add("internalId", KeycloakModelUtils.generateId())
.add("providerId", socialProviderId)
.add("alias", socialProviderId)
.add("updateProfileFirstLogin", updateProfileOnInitialSocialLogin)
.add("enabled", true)
.add("storeToken", false)
.add("authenticateByDefault", false)
.add("config", identityProviderConfig).get();
identityProviders.add(identityProvider);
log.debugv("Converted social provider {0} to identity provider", socialProviderId);
}
}
}
// Remove obsolete keys from realm
realm.remove("social");
realm.remove("updateProfileOnInitialSocialLogin");
realm.remove("socialConfig");
// Update realm in DB now
realms.save(realm);
log.debugv("Social providers of realm {0} converted to identity providers", realm.get("_id"));
}
} finally {
realmsCursor.close();
}
}
private void convertSocialToIdFedUsers() {
DBCollection users = db.getCollection("users");
DBCursor usersCursor = users.find();
try {
while (usersCursor.hasNext()) {
BasicDBObject user = (BasicDBObject) usersCursor.next();
BasicDBList socialLinks = (BasicDBList) user.get("socialLinks");
if (socialLinks != null) {
BasicDBList federatedIdentities = (BasicDBList) user.get("federatedIdentities");
if (federatedIdentities == null) {
federatedIdentities = new BasicDBList();
user.put("federatedIdentities", federatedIdentities);
}
for (Object socialLinkObj : socialLinks) {
BasicDBObject socialLink = (BasicDBObject) socialLinkObj;
BasicDBObject idFedLink = new BasicDBObject();
idFedLink.put("userName", socialLink.get("socialUsername"));
idFedLink.put("userId", socialLink.get("socialUserId"));
idFedLink.put("identityProvider", socialLink.get("socialProvider"));
federatedIdentities.add(idFedLink);
}
// Remove obsolete keys and save user
user.remove("socialLinks");
users.save(user);
if (log.isTraceEnabled()) {
log.tracev("Social links of user {0} converted to identity links", user.get("_id"));
}
}
}
} finally {
usersCursor.close();
}
log.debug("Social links of users converted to identity links");
}
private void addAccessCodeLoginTimeout() {
DBCollection realms = db.getCollection("realms");
DBCursor realmsCursor = realms.find();
try {
while (realmsCursor.hasNext()) {
BasicDBObject realm = (BasicDBObject) realmsCursor.next();
realm.put("accessCodeLifespanLogin", 1800);
realms.save(realm);
}
} finally {
realmsCursor.close();
}
}
private void addNewAdminRoles() {
DBCollection realms = db.getCollection("realms");
String adminRealmName = Config.getAdminRealm();
DBCursor realmsCursor = realms.find();
try {
while (realmsCursor.hasNext()) {
BasicDBObject realm = (BasicDBObject) realmsCursor.next();
if (adminRealmName.equals(realm.get("name"))) {
addNewAdminRolesToMasterRealm(realm);
} else {
addNewAdminRolesToRealm(realm);
}
}
} finally {
realmsCursor.close();
}
}
private void addNewAdminRolesToMasterRealm(BasicDBObject adminRealm) {
DBCollection realms = db.getCollection("realms");
DBCollection applications = db.getCollection("applications");
DBCollection roles = db.getCollection("roles");
DBCursor realmsCursor = realms.find();
try {
while (realmsCursor.hasNext()) {
BasicDBObject currentRealm = (BasicDBObject) realmsCursor.next();
String masterAdminAppName = currentRealm.getString("name") + "-realm";
BasicDBObject masterAdminApp = (BasicDBObject) applications.findOne(new BasicDBObject().append("realmId", adminRealm.get("_id")).append("name", masterAdminAppName));
String viewIdProvidersRoleId = insertApplicationRole(roles, AdminRoles.VIEW_IDENTITY_PROVIDERS, masterAdminApp.getString("_id"));
String manageIdProvidersRoleId = insertApplicationRole(roles, AdminRoles.MANAGE_IDENTITY_PROVIDERS, masterAdminApp.getString("_id"));
BasicDBObject adminRole = (BasicDBObject) roles.findOne(new BasicDBObject().append("realmId", adminRealm.get("_id")).append("name", AdminRoles.ADMIN));
BasicDBList adminCompositeRoles = (BasicDBList) adminRole.get("compositeRoleIds");
adminCompositeRoles.add(viewIdProvidersRoleId);
adminCompositeRoles.add(manageIdProvidersRoleId);
roles.save(adminRole);
log.debugv("Added roles {0} and {1} to application {2}", AdminRoles.VIEW_IDENTITY_PROVIDERS, AdminRoles.MANAGE_IDENTITY_PROVIDERS, masterAdminAppName);
}
} finally {
realmsCursor.close();
}
}
private void addNewAdminRolesToRealm(BasicDBObject currentRealm) {
DBCollection applications = db.getCollection("applications");
DBCollection roles = db.getCollection("roles");
BasicDBObject adminApp = (BasicDBObject) applications.findOne(new BasicDBObject().append("realmId", currentRealm.get("_id")).append("name", "realm-management"));
String viewIdProvidersRoleId = insertApplicationRole(roles, AdminRoles.VIEW_IDENTITY_PROVIDERS, adminApp.getString("_id"));
String manageIdProvidersRoleId = insertApplicationRole(roles, AdminRoles.MANAGE_IDENTITY_PROVIDERS, adminApp.getString("_id"));
BasicDBObject adminRole = (BasicDBObject) roles.findOne(new BasicDBObject().append("applicationId", adminApp.get("_id")).append("name", AdminRoles.REALM_ADMIN));
BasicDBList adminCompositeRoles = (BasicDBList) adminRole.get("compositeRoleIds");
adminCompositeRoles.add(viewIdProvidersRoleId);
adminCompositeRoles.add(manageIdProvidersRoleId);
roles.save(adminRole);
log.debugv("Added roles {0} and {1} to application realm-management of realm {2}", AdminRoles.VIEW_IDENTITY_PROVIDERS, AdminRoles.MANAGE_IDENTITY_PROVIDERS, currentRealm.get("name"));
}
private void addDefaultProtocolMappers(KeycloakSession session) {
addDefaultMappers(session, db.getCollection("applications"));
addDefaultMappers(session, db.getCollection("oauthClients"));
}
private void addDefaultMappers(KeycloakSession session, DBCollection clients) {
DBCursor clientsCursor = clients.find();
try {
while (clientsCursor.hasNext()) {
BasicDBObject currentClient = (BasicDBObject) clientsCursor.next();
BasicDBList dbProtocolMappers = new BasicDBList();
currentClient.put("protocolMappers", dbProtocolMappers);
Object claimMask = currentClient.get("allowedClaimsMask");
MigrationProvider migrationProvider = session.getProvider(MigrationProvider.class);
List<ProtocolMapperRepresentation> protocolMappers = migrationProvider.getMappersForClaimMask((Long) claimMask);
for (ProtocolMapperRepresentation protocolMapper : protocolMappers) {
BasicDBObject dbMapper = new BasicDBObject();
dbMapper.put("id", KeycloakModelUtils.generateId());
dbMapper.put("protocol", protocolMapper.getProtocol());
dbMapper.put("name", protocolMapper.getName());
dbMapper.put("consentRequired", protocolMapper.isConsentRequired());
dbMapper.put("consentText", protocolMapper.getConsentText());
dbMapper.put("protocolMapper", protocolMapper.getProtocolMapper());
Map<String, String> config = protocolMapper.getConfig();
BasicDBObject dbConfig = MapMapper.convertMap(config, null);
dbMapper.put("config", dbConfig);
dbProtocolMappers.add(dbMapper);
}
// Remove obsolete keys from client
currentClient.remove("allowedClaimsMask");
log.debugv("Added default mappers to application {1}", currentClient.get("name"));
clients.save(currentClient);
}
} finally {
clientsCursor.close();
}
}
private String insertApplicationRole(DBCollection roles, String roleName, String applicationId) {
BasicDBObject role = new BasicDBObject();
String roleId = KeycloakModelUtils.generateId();
role.append("_id", roleId);
role.append("name", roleName);
role.append("applicationId", applicationId);
role.append("nameIndex", applicationId + "//" + roleName);
roles.insert(role);
return roleId;
}
}

View file

@ -1,81 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.updater.impl.updates;
import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import org.keycloak.models.KeycloakSession;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class Update1_2_0_CR1 extends Update {
@Override
public String getId() {
return "1.2.0.CR1";
}
@Override
public void update(KeycloakSession session) {
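// Merges applications and oauthClients into a single "clients" collection
// and renames realms.adminAppId to masterAdminClient.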
deleteEntries("clientSessions");
deleteEntries("sessions");
convertApplicationsToClients();
convertOAuthClientsToClients();
db.getCollection("realms").update(new BasicDBObject(), new BasicDBObject("$rename", new BasicDBObject("adminAppId", "masterAdminClient")), false, true);
ensureIndex("userConsents", new String[]{"clientId", "userId"}, true, false);
}
private void convertApplicationsToClients() {
DBCollection applications = db.getCollection("applications");
applications.dropIndex("realmId_1_name_1");
applications.update(new BasicDBObject(), new BasicDBObject("$set", new BasicDBObject("consentRequired", false)), false, true);
applications.update(new BasicDBObject(), new BasicDBObject("$rename", new BasicDBObject("name", "clientId")), false, true);
renameCollection("applications", "clients");
log.debugv("Converted applications to clients");
DBCollection roles = db.getCollection("roles");
roles.update(new BasicDBObject(), new BasicDBObject("$rename", new BasicDBObject("applicationId", "clientId")), false, true);
log.debugv("Renamed roles.applicationId to roles.clientId");
ensureIndex("clients", new String[]{"realmId", "clientId"}, true, false);
}
private void convertOAuthClientsToClients() {
DBCollection clients = db.getCollection("clients");
DBCollection oauthClients = db.getCollection("oauthClients");
oauthClients.dropIndex("realmId_1_name_1");
oauthClients.update(new BasicDBObject(), new BasicDBObject("$rename", new BasicDBObject("name", "clientId")), false, true);
oauthClients.update(new BasicDBObject(), new BasicDBObject("$set", new BasicDBObject("consentRequired", true)), false, true);
DBCursor curs = oauthClients.find();
while (curs.hasNext()) {
clients.insert(curs.next());
}
oauthClients.drop();
log.debugv("Converted oauthClients to clients");
}
}

View file

@ -1,76 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.updater.impl.updates;
import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import org.keycloak.models.KeycloakSession;
import org.keycloak.representations.idm.IdentityProviderRepresentation;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class Update1_3_0 extends Update {
@Override
public String getId() {
return "1.3.0";
}
@Override
public void update(KeycloakSession session) {
deleteEntries("clientSessions");
deleteEntries("sessions");
removeField("realms", "passwordCredentialGrantAllowed");
updateIdentityProviders();
}
private void updateIdentityProviders() {
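// Replaces the boolean updateProfileFirstLogin flag with the new updateProfileFirstLoginMode
// value and defaults trustEmail to false for every identity provider.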
DBCollection realms = db.getCollection("realms");
DBCursor realmsCursor = realms.find();
try {
while (realmsCursor.hasNext()) {
BasicDBObject realm = (BasicDBObject) realmsCursor.next();
BasicDBList identityProviders = (BasicDBList) realm.get("identityProviders");
if (identityProviders != null) {
for (Object ipObj : identityProviders) {
BasicDBObject identityProvider = (BasicDBObject) ipObj;
boolean updateProfileFirstLogin = identityProvider.getBoolean("updateProfileFirstLogin");
String upflMode = updateProfileFirstLogin ? IdentityProviderRepresentation.UPFLM_ON : IdentityProviderRepresentation.UPFLM_OFF;
identityProvider.put("updateProfileFirstLoginMode", upflMode);
identityProvider.removeField("updateProfileFirstLogin");
identityProvider.put("trustEmail", false);
}
}
realms.save(realm);
}
} finally {
realmsCursor.close();
}
}
}

View file

@ -1,79 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.updater.impl.updates;
import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import org.keycloak.models.KeycloakSession;
import java.util.HashSet;
import java.util.Map;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class Update1_4_0 extends Update {
@Override
public String getId() {
return "1.4.0";
}
@Override
public void update(KeycloakSession session) throws ClassNotFoundException {
deleteEntries("clientSessions");
deleteEntries("sessions");
// Remove the obsolete "authenticators" field so it no longer causes warnings when realms are loaded
removeField("realms", "authenticators");
updateUserAttributes();
}
private void updateUserAttributes() {
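// Wraps single-valued string user attributes in lists to match the new multi-valued attribute format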
DBCollection users = db.getCollection("users");
DBCursor usersCursor = users.find();
try {
while (usersCursor.hasNext()) {
BasicDBObject user = (BasicDBObject) usersCursor.next();
BasicDBObject attributes = (BasicDBObject) user.get("attributes");
if (attributes != null) {
for (Map.Entry<String, Object> attr : new HashSet<>(attributes.entrySet())) {
String attrName = attr.getKey();
Object attrValue = attr.getValue();
if (attrValue != null && attrValue instanceof String) {
BasicDBList asList = new BasicDBList();
asList.add(attrValue);
attributes.put(attrName, asList);
}
}
user.put("attributes", attributes);
users.save(user);
}
}
} finally {
usersCursor.close();
}
}
}

View file

@ -1,56 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.updater.impl.updates;
import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import org.keycloak.models.KeycloakSession;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class Update1_7_0 extends Update {
@Override
public String getId() {
return "1.7.0";
}
@Override
public void update(KeycloakSession session) throws ClassNotFoundException {
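// Replaces the directGrantsOnly client flag with the new standardFlowEnabled,
// implicitFlowEnabled and directAccessGrantsEnabled flags.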
DBCollection clients = db.getCollection("clients");
DBCursor clientsCursor = clients.find();
try {
while (clientsCursor.hasNext()) {
BasicDBObject client = (BasicDBObject) clientsCursor.next();
boolean directGrantsOnly = client.getBoolean("directGrantsOnly", false);
client.append("standardFlowEnabled", !directGrantsOnly);
client.append("implicitFlowEnabled", false);
client.append("directAccessGrantsEnabled", directGrantsOnly);
client.removeField("directGrantsOnly");
clients.save(client);
}
} finally {
clientsCursor.close();
}
}
}

View file

@ -1,61 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.updater.impl.updates;
import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;
import com.mongodb.WriteResult;
import org.keycloak.credential.hash.Pbkdf2PasswordHashProvider;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.UserCredentialModel;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class Update1_8_0 extends Update {
@Override
public String getId() {
return "1.8.0";
}
@Override
public void update(KeycloakSession session) {
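// Select password and password-history credentials that have no "algorithm" field
// and set it to the PBKDF2 provider id.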
BasicDBList orArgs = new BasicDBList();
orArgs.add(new BasicDBObject("type", UserCredentialModel.PASSWORD));
orArgs.add(new BasicDBObject("type", UserCredentialModel.PASSWORD_HISTORY));
BasicDBObject elemMatch = new BasicDBObject("$or", orArgs);
elemMatch.put("algorithm", new BasicDBObject("$exists", false));
BasicDBObject query = new BasicDBObject("credentials", new BasicDBObject("$elemMatch", elemMatch));
BasicDBObject update = new BasicDBObject("$set", new BasicDBObject("credentials.$.algorithm", Pbkdf2PasswordHashProvider.ID));
DBCollection users = db.getCollection("users");
// Not sure how to do this in a single query, so repeat the bulk update until no more documents are modified
int countModified = 1;
while (countModified > 0) {
WriteResult wr = users.update(query, update, false, true);
countModified = wr.getN();
log.debugf("%d credentials modified in current iteration during upgrade to 1.8", countModified);
}
}
}

View file

@ -1,62 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.updater.impl.updates;
import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;
import com.mongodb.WriteResult;
import org.keycloak.credential.hash.Pbkdf2PasswordHashProvider;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.UserCredentialModel;
import org.keycloak.models.utils.HmacOTP;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class Update1_9_2 extends Update {
@Override
public String getId() {
return "1.9.2";
}
@Override
public void update(KeycloakSession session) {
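// Select password and password-history credentials whose algorithm is HMAC SHA-1
// and reset it to the PBKDF2 provider id.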
BasicDBList orArgs = new BasicDBList();
orArgs.add(new BasicDBObject("type", UserCredentialModel.PASSWORD));
orArgs.add(new BasicDBObject("type", UserCredentialModel.PASSWORD_HISTORY));
BasicDBObject elemMatch = new BasicDBObject("$or", orArgs);
elemMatch.put("algorithm", HmacOTP.HMAC_SHA1);
BasicDBObject query = new BasicDBObject("credentials", new BasicDBObject("$elemMatch", elemMatch));
BasicDBObject update = new BasicDBObject("$set", new BasicDBObject("credentials.$.algorithm", Pbkdf2PasswordHashProvider.ID));
DBCollection users = db.getCollection("users");
// Not sure how to do this in a single query, so repeat the bulk update until no more documents are modified
int countModified = 1;
while (countModified > 0) {
WriteResult wr = users.update(query, update, false, true);
countModified = wr.getN();
log.debugf("%d credentials modified in current iteration during upgrade to 1.8", countModified);
}
}
}

View file

@ -1,80 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.updater.impl.updates;
import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import org.keycloak.keys.KeyProvider;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.mongo.keycloak.entities.ComponentEntity;
import org.keycloak.models.utils.KeycloakModelUtils;
import java.util.Collections;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class Update2_3_0 extends Update {
@Override
public String getId() {
return "2.3.0";
}
@Override
public void update(KeycloakSession session) {
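// Moves the realm-level privateKeyPem/certificatePem into a new "rsa" KeyProvider component
// and drops the old key and codeSecret fields from the realm.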
DBCollection realms = db.getCollection("realms");
DBCursor cursor = realms.find();
while (cursor.hasNext()) {
BasicDBObject realm = (BasicDBObject) cursor.next();
String realmId = realm.getString("_id");
String privateKeyPem = realm.getString("privateKeyPem");
String certificatePem = realm.getString("certificatePem");
BasicDBList entities = (BasicDBList) realm.get("componentEntities");
BasicDBObject component = new BasicDBObject();
component.put("id", KeycloakModelUtils.generateId());
component.put("name", "rsa");
component.put("providerType", KeyProvider.class.getName());
component.put("providerId", "rsa");
component.put("parentId", realmId);
BasicDBObject config = new BasicDBObject();
config.put("priority", Collections.singletonList("100"));
config.put("privateKey", Collections.singletonList(privateKeyPem));
config.put("certificate", Collections.singletonList(certificatePem));
component.put("config", config);
entities.add(component);
realm.remove("privateKeyPem");
realm.remove("certificatePem");
realm.remove("publicKeyPem");
realm.remove("codeSecret");
realms.update(new BasicDBObject().append("_id", realmId), realm);
}
}
}

View file

@ -1,53 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.updater.impl.updates;
import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import org.jboss.logging.Logger;
import org.keycloak.keys.KeyProvider;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.LDAPConstants;
import org.keycloak.models.utils.KeycloakModelUtils;
import org.keycloak.storage.UserStorageProvider;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class Update2_4_0 extends AbstractMigrateUserFedToComponent {
@Override
public String getId() {
return "2.4.0";
}
@Override
public void update(KeycloakSession session) {
portUserFedMappersToComponent(LDAPConstants.LDAP_PROVIDER, "org.keycloak.storage.ldap.mappers.LDAPStorageMapper");
portUserFedToComponent(LDAPConstants.LDAP_PROVIDER);
}
}

View file

@ -1,57 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.mongo.updater.impl.updates;
import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import org.keycloak.models.KeycloakSession;
import org.keycloak.provider.ProviderFactory;
import org.keycloak.storage.UserStorageProvider;
import java.util.List;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class Update2_5_0 extends AbstractMigrateUserFedToComponent {
@Override
public String getId() {
return "2.5.0";
}
@Override
public void update(KeycloakSession session) {
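// Port every configured user-storage provider type to components and seed the new
// loginWithEmailAllowed / duplicateEmailsAllowed realm flags.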
List<ProviderFactory> factories = session.getKeycloakSessionFactory().getProviderFactories(UserStorageProvider.class);
for (ProviderFactory factory : factories) {
portUserFedToComponent(factory.getId());
}
DBCollection realms = db.getCollection("realms");
try (DBCursor realmsCursor = realms.find()) {
while (realmsCursor.hasNext()) {
BasicDBObject realm = (BasicDBObject) realmsCursor.next();
realm.append("loginWithEmailAllowed", true);
realm.append("duplicateEmailsAllowed", false);
realms.save(realm);
}
}
}
}

View file

@ -1,149 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.events.mongo;
import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import org.keycloak.events.admin.AdminEvent;
import org.keycloak.events.admin.AdminEventQuery;
import org.keycloak.events.admin.OperationType;
import org.keycloak.events.admin.ResourceType;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
import java.util.regex.Pattern;
public class MongoAdminEventQuery implements AdminEventQuery{
private Integer firstResult;
private Integer maxResults;
private DBCollection audit;
private final BasicDBObject query;
public MongoAdminEventQuery(DBCollection audit) {
this.audit = audit;
query = new BasicDBObject();
}
@Override
public AdminEventQuery realm(String realmId) {
query.put("realmId", realmId);
return this;
}
@Override
public AdminEventQuery operation(OperationType... operations) {
List<String> operationStrings = new LinkedList<String>();
for (OperationType e : operations) {
operationStrings.add(e.toString());
}
query.put("operationType", new BasicDBObject("$in", operationStrings));
return this;
}
@Override
public AdminEventQuery resourceType(ResourceType... resourceTypes) {
List<String> resourceTypeStrings = new LinkedList<String>();
for (ResourceType e : resourceTypes) {
resourceTypeStrings.add(e.toString());
}
query.put("resourceType", new BasicDBObject("$in", resourceTypeStrings));
return this;
}
@Override
public AdminEventQuery authRealm(String authRealmId) {
query.put("authRealmId", authRealmId);
return this;
}
@Override
public AdminEventQuery authClient(String authClientId) {
query.put("authClientId", authClientId);
return this;
}
@Override
public AdminEventQuery authUser(String authUserId) {
query.put("authUserId", authUserId);
return this;
}
@Override
public AdminEventQuery authIpAddress(String ipAddress) {
query.put("authIpAddress", ipAddress);
return this;
}
@Override
public AdminEventQuery resourcePath(String resourcePath) {
query.put("resourcePath", Pattern.compile(resourcePath));
return this;
}
@Override
public AdminEventQuery fromTime(Date fromTime) {
BasicDBObject time = query.containsField("time") ? (BasicDBObject) query.get("time") : new BasicDBObject();
time.append("$gte", fromTime.getTime());
query.put("time", time);
return this;
}
@Override
public AdminEventQuery toTime(Date toTime) {
BasicDBObject time = query.containsField("time") ? (BasicDBObject) query.get("time") : new BasicDBObject();
time.append("$lte", toTime.getTime());
query.put("time", time);
return this;
}
@Override
public AdminEventQuery firstResult(int firstResult) {
this.firstResult = firstResult;
return this;
}
@Override
public AdminEventQuery maxResults(int maxResults) {
this.maxResults = maxResults;
return this;
}
@Override
public List<AdminEvent> getResultList() {
DBCursor cur = audit.find(query).sort(new BasicDBObject("time", -1));
if (firstResult != null) {
cur.skip(firstResult);
}
if (maxResults != null) {
cur.limit(maxResults);
}
List<AdminEvent> events = new LinkedList<AdminEvent>();
while (cur.hasNext()) {
events.add(MongoEventStoreProvider.convertAdminEvent((BasicDBObject) cur.next()));
}
return events;
}
}

View file

@ -1,126 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.events.mongo;
import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import org.keycloak.events.Event;
import org.keycloak.events.EventQuery;
import org.keycloak.events.EventType;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
/**
* @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a>
*/
public class MongoEventQuery implements EventQuery {
private Integer firstResult;
private Integer maxResults;
private DBCollection audit;
private final BasicDBObject query;
public MongoEventQuery(DBCollection audit) {
this.audit = audit;
query = new BasicDBObject();
}
@Override
public EventQuery type(EventType... types) {
List<String> eventStrings = new LinkedList<String>();
for (EventType e : types) {
eventStrings.add(e.toString());
}
query.put("type", new BasicDBObject("$in", eventStrings));
return this;
}
@Override
public EventQuery realm(String realmId) {
query.put("realmId", realmId);
return this;
}
@Override
public EventQuery client(String clientId) {
query.put("clientId", clientId);
return this;
}
@Override
public EventQuery user(String userId) {
query.put("userId", userId);
return this;
}
@Override
public EventQuery fromDate(Date fromDate) {
BasicDBObject time = query.containsField("time") ? (BasicDBObject) query.get("time") : new BasicDBObject();
time.append("$gte", fromDate.getTime());
query.put("time", time);
return this;
}
@Override
public EventQuery toDate(Date toDate) {
BasicDBObject time = query.containsField("time") ? (BasicDBObject) query.get("time") : new BasicDBObject();
time.append("$lte", toDate.getTime());
query.put("time", time);
return this;
}
@Override
public EventQuery ipAddress(String ipAddress) {
query.put("ipAddress", ipAddress);
return this;
}
@Override
public EventQuery firstResult(int firstResult) {
this.firstResult = firstResult;
return this;
}
@Override
public EventQuery maxResults(int maxResults) {
this.maxResults = maxResults;
return this;
}
@Override
public List<Event> getResultList() {
DBCursor cur = audit.find(query).sort(new BasicDBObject("time", -1));
if (firstResult != null) {
cur.skip(firstResult);
}
if (maxResults != null) {
cur.limit(maxResults);
}
List<Event> events = new LinkedList<Event>();
while (cur.hasNext()) {
events.add(MongoEventStoreProvider.convertEvent((BasicDBObject) cur.next()));
}
return events;
}
}

View file

@ -1,204 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.events.mongo;
import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;
import com.mongodb.DBObject;
import org.keycloak.events.Event;
import org.keycloak.events.EventQuery;
import org.keycloak.events.EventStoreProvider;
import org.keycloak.events.EventType;
import org.keycloak.events.admin.AdminEvent;
import org.keycloak.events.admin.AdminEventQuery;
import org.keycloak.events.admin.AuthDetails;
import org.keycloak.events.admin.OperationType;
import org.keycloak.events.admin.ResourceType;
import java.util.HashMap;
import java.util.Map;
/**
* @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a>
*/
public class MongoEventStoreProvider implements EventStoreProvider {
private DBCollection events;
private DBCollection adminEvents;
public MongoEventStoreProvider(DBCollection events, DBCollection adminEvents) {
this.events = events;
this.adminEvents = adminEvents;
}
@Override
public EventQuery createQuery() {
return new MongoEventQuery(events);
}
@Override
public void clear() {
events.remove(new BasicDBObject());
}
@Override
public void clear(String realmId) {
events.remove(new BasicDBObject("realmId", realmId));
}
@Override
public void clear(String realmId, long olderThan) {
BasicDBObject q = new BasicDBObject();
q.put("realmId", realmId);
q.put("time", new BasicDBObject("$lt", olderThan));
events.remove(q);
}
@Override
public void onEvent(Event event) {
events.insert(convertEvent(event));
}
@Override
public AdminEventQuery createAdminQuery() {
return new MongoAdminEventQuery(adminEvents);
}
@Override
public void clearAdmin() {
adminEvents.remove(new BasicDBObject());
}
@Override
public void clearAdmin(String realmId) {
adminEvents.remove(new BasicDBObject("realmId", realmId));
}
@Override
public void clearAdmin(String realmId, long olderThan) {
BasicDBObject q = new BasicDBObject();
q.put("realmId", realmId);
q.put("time", new BasicDBObject("$lt", olderThan));
adminEvents.remove(q);
}
@Override
public void onEvent(AdminEvent adminEvent, boolean includeRepresentation) {
adminEvents.insert(convertAdminEvent(adminEvent, includeRepresentation));
}
@Override
public void close() {
}
static DBObject convertEvent(Event event) {
BasicDBObject e = new BasicDBObject();
e.put("time", event.getTime());
e.put("type", event.getType().toString());
e.put("realmId", event.getRealmId());
e.put("clientId", event.getClientId());
e.put("userId", event.getUserId());
e.put("sessionId", event.getSessionId());
e.put("ipAddress", event.getIpAddress());
e.put("error", event.getError());
BasicDBObject details = new BasicDBObject();
if (event.getDetails() != null) {
for (Map.Entry<String, String> entry : event.getDetails().entrySet()) {
details.put(entry.getKey(), entry.getValue());
}
}
e.put("details", details);
return e;
}
static Event convertEvent(BasicDBObject o) {
Event event = new Event();
event.setTime(o.getLong("time"));
event.setType(EventType.valueOf(o.getString("type")));
event.setRealmId(o.getString("realmId"));
event.setClientId(o.getString("clientId"));
event.setUserId(o.getString("userId"));
event.setSessionId(o.getString("sessionId"));
event.setIpAddress(o.getString("ipAddress"));
event.setError(o.getString("error"));
BasicDBObject d = (BasicDBObject) o.get("details");
if (d != null) {
Map<String, String> details = new HashMap<String, String>();
for (Object k : d.keySet()) {
details.put((String) k, d.getString((String) k));
}
event.setDetails(details);
}
return event;
}
private static DBObject convertAdminEvent(AdminEvent adminEvent, boolean includeRepresentation) {
BasicDBObject e = new BasicDBObject();
e.put("time", adminEvent.getTime());
e.put("realmId", adminEvent.getRealmId());
e.put("operationType", adminEvent.getOperationType().toString());
setAuthDetails(e, adminEvent.getAuthDetails());
e.put("resourcePath", adminEvent.getResourcePath());
e.put("error", adminEvent.getError());
if(includeRepresentation) {
e.put("representation", adminEvent.getRepresentation());
}
return e;
}
static AdminEvent convertAdminEvent(BasicDBObject o) {
AdminEvent adminEvent = new AdminEvent();
adminEvent.setTime(o.getLong("time"));
adminEvent.setRealmId(o.getString("realmId"));
adminEvent.setOperationType(OperationType.valueOf(o.getString("operationType")));
if (o.getString("resourceType") != null) {
adminEvent.setResourceType(ResourceType.valueOf(o.getString("resourceType")));
}
setAuthDetails(adminEvent, o);
adminEvent.setResourcePath(o.getString("resourcePath"));
adminEvent.setError(o.getString("error"));
if(o.getString("representation") != null) {
adminEvent.setRepresentation(o.getString("representation"));
}
return adminEvent;
}
private static void setAuthDetails(BasicDBObject e, AuthDetails authDetails) {
e.put("authRealmId", authDetails.getRealmId());
e.put("authClientId", authDetails.getClientId());
e.put("authUserId", authDetails.getUserId());
e.put("authIpAddress", authDetails.getIpAddress());
}
private static void setAuthDetails(AdminEvent adminEvent, BasicDBObject o) {
AuthDetails authDetails = new AuthDetails();
authDetails.setRealmId(o.getString("authRealmId"));
authDetails.setClientId(o.getString("authClientId"));
authDetails.setUserId(o.getString("authUserId"));
authDetails.setIpAddress(o.getString("authIpAddress"));
adminEvent.setAuthDetails(authDetails);
}
}

View file

@ -1,70 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.events.mongo;
import com.mongodb.DBCollection;
import com.mongodb.WriteConcern;
import org.jboss.logging.Logger;
import org.keycloak.Config;
import org.keycloak.connections.mongo.MongoConnectionProvider;
import org.keycloak.events.EventStoreProvider;
import org.keycloak.events.EventStoreProviderFactory;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.KeycloakSessionFactory;
/**
* @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a>
*/
public class MongoEventStoreProviderFactory implements EventStoreProviderFactory {
protected static final Logger logger = Logger.getLogger(MongoEventStoreProviderFactory.class);
public static final String ID = "mongo";
@Override
public EventStoreProvider create(KeycloakSession session) {
MongoConnectionProvider connection = session.getProvider(MongoConnectionProvider.class);
DBCollection collection = connection.getDB().getCollection("events");
DBCollection adminCollection = connection.getDB().getCollection("adminEvents");
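// UNACKNOWLEDGED writes are fire-and-forget: the driver does not wait for a server response,
// so storing events never blocks the request and server-side write errors are not reported back.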
collection.setWriteConcern(WriteConcern.UNACKNOWLEDGED);
adminCollection.setWriteConcern(WriteConcern.UNACKNOWLEDGED);
return new MongoEventStoreProvider(collection, adminCollection);
}
@Override
public void init(Config.Scope config) {
}
@Override
public void postInit(KeycloakSessionFactory factory) {
}
@Override
public void close() {
}
@Override
public String getId() {
return ID;
}
}

View file

@ -1,61 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.models.mongo.keycloak.adapters;
import org.keycloak.connections.mongo.api.MongoIdentifiableEntity;
import org.keycloak.connections.mongo.api.MongoStore;
import org.keycloak.connections.mongo.api.context.MongoStoreInvocationContext;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public abstract class AbstractMongoAdapter<T extends MongoIdentifiableEntity> {
protected final MongoStoreInvocationContext invocationContext;
public AbstractMongoAdapter(MongoStoreInvocationContext invocationContext) {
this.invocationContext = invocationContext;
}
protected abstract T getMongoEntity();
protected void updateMongoEntity() {
getMongoStore().updateEntity(getMongoEntity(), invocationContext);
}
@Override
public boolean equals(Object o) {
if (o == this) return true;
if (o == null || getClass() != o.getClass()) return false;
AbstractMongoAdapter that = (AbstractMongoAdapter) o;
if (getMongoEntity() == null && that.getMongoEntity() == null) return true;
return getMongoEntity().equals(that.getMongoEntity());
}
@Override
public int hashCode() {
return getMongoEntity()!=null ? getMongoEntity().hashCode() : super.hashCode();
}
protected MongoStore getMongoStore() {
return invocationContext.getMongoStore();
}
}

View file

@ -1,770 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.models.mongo.keycloak.adapters;
import org.keycloak.connections.mongo.api.context.MongoStoreInvocationContext;
import org.keycloak.models.ClientModel;
import org.keycloak.models.ClientTemplateModel;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.ModelDuplicateException;
import org.keycloak.models.ProtocolMapperModel;
import org.keycloak.models.RealmModel;
import org.keycloak.models.RoleModel;
import org.keycloak.models.mongo.keycloak.entities.MongoClientEntity;
import org.keycloak.models.mongo.keycloak.entities.MongoRoleEntity;
import org.keycloak.models.mongo.keycloak.entities.ProtocolMapperEntity;
import org.keycloak.models.mongo.utils.MongoModelUtils;
import org.keycloak.models.utils.KeycloakModelUtils;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class ClientAdapter extends AbstractMongoAdapter<MongoClientEntity> implements ClientModel {
protected final MongoClientEntity clientEntity;
private final RealmModel realm;
protected KeycloakSession session;
public ClientAdapter(KeycloakSession session, RealmModel realm, MongoClientEntity clientEntity, MongoStoreInvocationContext invContext) {
super(invContext);
this.session = session;
this.realm = realm;
this.clientEntity = clientEntity;
}
@Override
public MongoClientEntity getMongoEntity() {
return clientEntity;
}
@Override
public void updateClient() {
updateMongoEntity();
session.getKeycloakSessionFactory().publish(new RealmModel.ClientUpdatedEvent() {
@Override
public ClientModel getUpdatedClient() {
return ClientAdapter.this;
}
@Override
public KeycloakSession getKeycloakSession() {
return session;
}
});
}
@Override
public String getId() {
return getMongoEntity().getId();
}
@Override
public String getClientId() {
return getMongoEntity().getClientId();
}
@Override
public String getName() {
return getMongoEntity().getName();
}
@Override
public void setName(String name) {
getMongoEntity().setName(name);
updateMongoEntity();
}
@Override
public String getDescription() { return getMongoEntity().getDescription(); }
@Override
public void setDescription(String description) {
getMongoEntity().setDescription(description);
updateMongoEntity();
}
@Override
public void setClientId(String clientId) {
getMongoEntity().setClientId(clientId);
updateMongoEntity();
}
@Override
public Set<String> getWebOrigins() {
Set<String> result = new HashSet<String>();
if (getMongoEntity().getWebOrigins() != null) {
result.addAll(getMongoEntity().getWebOrigins());
}
return result;
}
@Override
public void setWebOrigins(Set<String> webOrigins) {
List<String> result = new ArrayList<String>();
result.addAll(webOrigins);
getMongoEntity().setWebOrigins(result);
updateMongoEntity();
}
@Override
public void addWebOrigin(String webOrigin) {
getMongoStore().pushItemToList(clientEntity, "webOrigins", webOrigin, true, invocationContext);
}
@Override
public void removeWebOrigin(String webOrigin) {
getMongoStore().pullItemFromList(clientEntity, "webOrigins", webOrigin, invocationContext);
}
@Override
public Set<String> getRedirectUris() {
Set<String> result = new HashSet<String>();
if (getMongoEntity().getRedirectUris() != null) {
result.addAll(getMongoEntity().getRedirectUris());
}
return result;
}
@Override
public void setRedirectUris(Set<String> redirectUris) {
List<String> result = new ArrayList<String>();
result.addAll(redirectUris);
getMongoEntity().setRedirectUris(result);
updateMongoEntity();
}
@Override
public void addRedirectUri(String redirectUri) {
getMongoStore().pushItemToList(clientEntity, "redirectUris", redirectUri, true, invocationContext);
}
@Override
public void removeRedirectUri(String redirectUri) {
getMongoStore().pullItemFromList(clientEntity, "redirectUris", redirectUri, invocationContext);
}
@Override
public boolean isEnabled() {
return getMongoEntity().isEnabled();
}
@Override
public void setEnabled(boolean enabled) {
getMongoEntity().setEnabled(enabled);
updateMongoEntity();
}
@Override
public String getClientAuthenticatorType() {
return getMongoEntity().getClientAuthenticatorType();
}
@Override
public void setClientAuthenticatorType(String clientAuthenticatorType) {
getMongoEntity().setClientAuthenticatorType(clientAuthenticatorType);
updateMongoEntity();
}
@Override
public boolean validateSecret(String secret) {
return secret.equals(getMongoEntity().getSecret());
}
@Override
public String getSecret() {
return getMongoEntity().getSecret();
}
@Override
public void setSecret(String secret) {
getMongoEntity().setSecret(secret);
updateMongoEntity();
}
@Override
public String getRegistrationToken() {
return getMongoEntity().getRegistrationToken();
}
@Override
public void setRegistrationToken(String registrationToken) {
getMongoEntity().setRegistrationToken(registrationToken);
updateMongoEntity();
}
@Override
public boolean isPublicClient() {
return getMongoEntity().isPublicClient();
}
@Override
public void setPublicClient(boolean flag) {
getMongoEntity().setPublicClient(flag);
updateMongoEntity();
}
@Override
public boolean isFrontchannelLogout() {
return getMongoEntity().isFrontchannelLogout();
}
@Override
public void setFrontchannelLogout(boolean flag) {
getMongoEntity().setFrontchannelLogout(flag);
updateMongoEntity();
}
@Override
public boolean isFullScopeAllowed() {
return getMongoEntity().isFullScopeAllowed();
}
@Override
public void setFullScopeAllowed(boolean value) {
getMongoEntity().setFullScopeAllowed(value);
updateMongoEntity();
}
@Override
public RealmModel getRealm() {
return realm;
}
@Override
public int getNotBefore() {
return getMongoEntity().getNotBefore();
}
@Override
public void setNotBefore(int notBefore) {
getMongoEntity().setNotBefore(notBefore);
updateMongoEntity();
}
@Override
public Set<RoleModel> getScopeMappings() {
Set<RoleModel> result = new HashSet<RoleModel>();
List<MongoRoleEntity> roles = MongoModelUtils.getAllScopesOfClient(this, invocationContext);
for (MongoRoleEntity role : roles) {
if (realm.getId().equals(role.getRealmId())) {
result.add(new RoleAdapter(session, realm, role, realm, invocationContext));
} else {
// Likely a client (application) role, but the owning client is not loaded at this point
result.add(new RoleAdapter(session, realm, role, invocationContext));
}
}
return result;
}
@Override
public Set<RoleModel> getRealmScopeMappings() {
Set<RoleModel> allScopes = getScopeMappings();
// Filter to retrieve just realm roles. TODO: Maybe avoid this programmatic filter, e.g. by keeping separate fields for realmRoles and appRoles on the user.
Set<RoleModel> realmRoles = new HashSet<RoleModel>();
for (RoleModel role : allScopes) {
MongoRoleEntity roleEntity = ((RoleAdapter) role).getRole();
if (realm.getId().equals(roleEntity.getRealmId())) {
realmRoles.add(role);
}
}
return realmRoles;
}
@Override
public void addScopeMapping(RoleModel role) {
getMongoStore().pushItemToList(this.getMongoEntity(), "scopeIds", role.getId(), true, invocationContext);
}
@Override
public void deleteScopeMapping(RoleModel role) {
getMongoStore().pullItemFromList(this.getMongoEntity(), "scopeIds", role.getId(), invocationContext);
}
@Override
public String getProtocol() {
return getMongoEntity().getProtocol();
}
@Override
public void setProtocol(String protocol) {
getMongoEntity().setProtocol(protocol);
updateMongoEntity();
}
@Override
public void setAttribute(String name, String value) {
getMongoEntity().getAttributes().put(name, value);
updateMongoEntity();
}
@Override
public void removeAttribute(String name) {
getMongoEntity().getAttributes().remove(name);
updateMongoEntity();
}
@Override
public String getAttribute(String name) {
return getMongoEntity().getAttributes().get(name);
}
@Override
public Map<String, String> getAttributes() {
Map<String, String> copy = new HashMap<String, String>();
copy.putAll(getMongoEntity().getAttributes());
return copy;
}
@Override
public Set<ProtocolMapperModel> getProtocolMappers() {
Set<ProtocolMapperModel> result = new HashSet<ProtocolMapperModel>();
for (ProtocolMapperEntity entity : getMongoEntity().getProtocolMappers()) {
ProtocolMapperModel mapping = new ProtocolMapperModel();
mapping.setId(entity.getId());
mapping.setName(entity.getName());
mapping.setProtocol(entity.getProtocol());
mapping.setProtocolMapper(entity.getProtocolMapper());
mapping.setConsentRequired(entity.isConsentRequired());
mapping.setConsentText(entity.getConsentText());
Map<String, String> config = new HashMap<String, String>();
if (entity.getConfig() != null) {
config.putAll(entity.getConfig());
}
mapping.setConfig(config);
result.add(mapping);
}
return result;
}
@Override
public ProtocolMapperModel addProtocolMapper(ProtocolMapperModel model) {
if (getProtocolMapperByName(model.getProtocol(), model.getName()) != null) {
throw new ModelDuplicateException("Protocol mapper name must be unique per protocol");
}
ProtocolMapperEntity entity = new ProtocolMapperEntity();
String id = model.getId() != null ? model.getId() : KeycloakModelUtils.generateId();
entity.setId(id);
entity.setProtocol(model.getProtocol());
entity.setName(model.getName());
entity.setProtocolMapper(model.getProtocolMapper());
entity.setConfig(model.getConfig());
entity.setConsentRequired(model.isConsentRequired());
entity.setConsentText(model.getConsentText());
getMongoEntity().getProtocolMappers().add(entity);
updateMongoEntity();
return entityToModel(entity);
}
@Override
public void removeProtocolMapper(ProtocolMapperModel mapping) {
for (ProtocolMapperEntity entity : getMongoEntity().getProtocolMappers()) {
if (entity.getId().equals(mapping.getId())) {
session.users().preRemove(mapping);
getMongoEntity().getProtocolMappers().remove(entity);
updateMongoEntity();
break;
}
}
}
protected ProtocolMapperEntity getProtocolMapperyEntityById(String id) {
for (ProtocolMapperEntity entity : getMongoEntity().getProtocolMappers()) {
if (entity.getId().equals(id)) {
return entity;
}
}
return null;
}
protected ProtocolMapperEntity getProtocolMapperEntityByName(String protocol, String name) {
for (ProtocolMapperEntity entity : getMongoEntity().getProtocolMappers()) {
if (entity.getProtocol().equals(protocol) && entity.getName().equals(name)) {
return entity;
}
}
return null;
}
@Override
public void updateProtocolMapper(ProtocolMapperModel mapping) {
ProtocolMapperEntity entity = getProtocolMapperyEntityById(mapping.getId());
entity.setProtocolMapper(mapping.getProtocolMapper());
entity.setConsentRequired(mapping.isConsentRequired());
entity.setConsentText(mapping.getConsentText());
if (entity.getConfig() != null) {
entity.getConfig().clear();
entity.getConfig().putAll(mapping.getConfig());
} else {
entity.setConfig(mapping.getConfig());
}
updateMongoEntity();
}
@Override
public ProtocolMapperModel getProtocolMapperById(String id) {
ProtocolMapperEntity entity = getProtocolMapperyEntityById(id);
if (entity == null) return null;
return entityToModel(entity);
}
@Override
public ProtocolMapperModel getProtocolMapperByName(String protocol, String name) {
ProtocolMapperEntity entity = getProtocolMapperEntityByName(protocol, name);
if (entity == null) return null;
return entityToModel(entity);
}
protected ProtocolMapperModel entityToModel(ProtocolMapperEntity entity) {
ProtocolMapperModel mapping = new ProtocolMapperModel();
mapping.setId(entity.getId());
mapping.setName(entity.getName());
mapping.setProtocol(entity.getProtocol());
mapping.setProtocolMapper(entity.getProtocolMapper());
mapping.setConsentRequired(entity.isConsentRequired());
mapping.setConsentText(entity.getConsentText());
Map<String, String> config = new HashMap<String, String>();
if (entity.getConfig() != null) config.putAll(entity.getConfig());
mapping.setConfig(config);
return mapping;
}
@Override
public boolean isSurrogateAuthRequired() {
return getMongoEntity().isSurrogateAuthRequired();
}
@Override
public void setSurrogateAuthRequired(boolean surrogateAuthRequired) {
getMongoEntity().setSurrogateAuthRequired(surrogateAuthRequired);
updateMongoEntity();
}
@Override
public String getManagementUrl() {
return getMongoEntity().getManagementUrl();
}
@Override
public void setManagementUrl(String url) {
getMongoEntity().setManagementUrl(url);
updateMongoEntity();
}
@Override
public void setRootUrl(String url) {
getMongoEntity().setRootUrl(url);
updateMongoEntity();
}
@Override
public String getRootUrl() {
return getMongoEntity().getRootUrl();
}
@Override
public void setBaseUrl(String url) {
getMongoEntity().setBaseUrl(url);
updateMongoEntity();
}
@Override
public String getBaseUrl() {
return getMongoEntity().getBaseUrl();
}
@Override
public boolean isBearerOnly() {
return getMongoEntity().isBearerOnly();
}
@Override
public void setBearerOnly(boolean only) {
getMongoEntity().setBearerOnly(only);
updateMongoEntity();
}
@Override
public boolean isConsentRequired() {
return getMongoEntity().isConsentRequired();
}
@Override
public void setConsentRequired(boolean consentRequired) {
getMongoEntity().setConsentRequired(consentRequired);
updateMongoEntity();
}
@Override
public boolean isStandardFlowEnabled() {
return getMongoEntity().isStandardFlowEnabled();
}
@Override
public void setStandardFlowEnabled(boolean standardFlowEnabled) {
getMongoEntity().setStandardFlowEnabled(standardFlowEnabled);
updateMongoEntity();
}
@Override
public boolean isImplicitFlowEnabled() {
return getMongoEntity().isImplicitFlowEnabled();
}
@Override
public void setImplicitFlowEnabled(boolean implicitFlowEnabled) {
getMongoEntity().setImplicitFlowEnabled(implicitFlowEnabled);
updateMongoEntity();
}
@Override
public boolean isDirectAccessGrantsEnabled() {
return getMongoEntity().isDirectAccessGrantsEnabled();
}
@Override
public void setDirectAccessGrantsEnabled(boolean directAccessGrantsEnabled) {
getMongoEntity().setDirectAccessGrantsEnabled(directAccessGrantsEnabled);
updateMongoEntity();
}
@Override
public boolean isServiceAccountsEnabled() {
return getMongoEntity().isServiceAccountsEnabled();
}
@Override
public void setServiceAccountsEnabled(boolean serviceAccountsEnabled) {
getMongoEntity().setServiceAccountsEnabled(serviceAccountsEnabled);
updateMongoEntity();
}
@Override
public RoleModel getRole(String name) {
return session.realms().getClientRole(realm, this, name);
}
@Override
public RoleModel addRole(String name) {
return session.realms().addClientRole(realm, this, name);
}
@Override
public RoleModel addRole(String id, String name) {
return session.realms().addClientRole(realm, this, id, name);
}
@Override
public boolean removeRole(RoleModel role) {
return session.realms().removeRole(realm, role);
}
@Override
public Set<RoleModel> getRoles() {
return session.realms().getClientRoles(realm, this);
}
@Override
public boolean hasScope(RoleModel role) {
if (isFullScopeAllowed()) return true;
Set<RoleModel> roles = getScopeMappings();
if (roles.contains(role)) return true;
for (RoleModel mapping : roles) {
if (mapping.hasRole(role)) return true;
}
roles = getRoles();
if (roles.contains(role)) return true;
for (RoleModel mapping : roles) {
if (mapping.hasRole(role)) return true;
}
return false;
}
@Override
public List<String> getDefaultRoles() {
return getMongoEntity().getDefaultRoles();
}
@Override
public void addDefaultRole(String name) {
RoleModel role = getRole(name);
if (role == null) {
addRole(name);
}
getMongoStore().pushItemToList(getMongoEntity(), "defaultRoles", name, true, invocationContext);
}
@Override
public void updateDefaultRoles(String... defaultRoles) {
List<String> roleNames = new ArrayList<String>();
for (String roleName : defaultRoles) {
RoleModel role = getRole(roleName);
if (role == null) {
addRole(roleName);
}
roleNames.add(roleName);
}
getMongoEntity().setDefaultRoles(roleNames);
updateMongoEntity();
}
@Override
public void removeDefaultRoles(String... defaultRoles) {
List<String> roleNames = new ArrayList<String>();
for (String role : getMongoEntity().getDefaultRoles()) {
if (!RealmAdapter.contains(role, defaultRoles)) roleNames.add(role);
}
getMongoEntity().setDefaultRoles(roleNames);
updateMongoEntity();
}
@Override
public int getNodeReRegistrationTimeout() {
return getMongoEntity().getNodeReRegistrationTimeout();
}
@Override
public void setNodeReRegistrationTimeout(int timeout) {
getMongoEntity().setNodeReRegistrationTimeout(timeout);
updateMongoEntity();
}
@Override
public Map<String, Integer> getRegisteredNodes() {
return getMongoEntity().getRegisteredNodes() == null ? Collections.<String, Integer>emptyMap() : Collections.unmodifiableMap(getMongoEntity().getRegisteredNodes());
}
@Override
public void registerNode(String nodeHost, int registrationTime) {
MongoClientEntity entity = getMongoEntity();
if (entity.getRegisteredNodes() == null) {
entity.setRegisteredNodes(new HashMap<String, Integer>());
}
entity.getRegisteredNodes().put(nodeHost, registrationTime);
updateMongoEntity();
}
@Override
public void unregisterNode(String nodeHost) {
MongoClientEntity entity = getMongoEntity();
if (entity.getRegisteredNodes() == null) return;
entity.getRegisteredNodes().remove(nodeHost);
updateMongoEntity();
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || !(o instanceof ClientModel)) return false;
ClientModel that = (ClientModel) o;
return that.getId().equals(getId());
}
@Override
public int hashCode() {
return getId().hashCode();
}
@Override
public ClientTemplateModel getClientTemplate() {
if (getMongoEntity().getClientTemplate() == null) return null;
return session.realms().getClientTemplateById(getMongoEntity().getClientTemplate(), realm);
}
@Override
public void setClientTemplate(ClientTemplateModel template) {
if (template == null) {
getMongoEntity().setClientTemplate(null);
} else {
getMongoEntity().setClientTemplate(template.getId());
}
updateMongoEntity();
}
@Override
public boolean useTemplateScope() {
return getMongoEntity().isUseTemplateScope();
}
@Override
public void setUseTemplateScope(boolean flag) {
getMongoEntity().setUseTemplateScope(flag);
updateMongoEntity();
}
@Override
public boolean useTemplateMappers() {
return getMongoEntity().isUseTemplateMappers();
}
@Override
public void setUseTemplateMappers(boolean flag) {
getMongoEntity().setUseTemplateMappers(flag);
updateMongoEntity();
}
@Override
public boolean useTemplateConfig() {
return getMongoEntity().isUseTemplateConfig();
}
@Override
public void setUseTemplateConfig(boolean flag) {
getMongoEntity().setUseTemplateConfig(flag);
updateMongoEntity();
}
}
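
The list mutators above (addWebOrigin, addRedirectUri, addScopeMapping, addDefaultRole) delegate to MongoStore.pushItemToList / pullItemFromList rather than rewriting the whole entity. As a rough sketch of what those helpers amount to at the legacy mongo-java-driver level (not the actual MongoStore implementation; the collection name, id and value are placeholders):

import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.MongoClient;

// Rough sketch of what pushItemToList / pullItemFromList amount to at the driver level.
// Collection name, id and value are placeholders, not taken from the removed code.
public class ListUpdateSketch {
    public static void main(String[] args) {
        MongoClient mongo = new MongoClient("localhost", 27017);
        DB db = mongo.getDB("keycloak");
        DBCollection clients = db.getCollection("clients");

        BasicDBObject byId = new BasicDBObject("_id", "some-client-id");

        // pushItemToList(entity, "webOrigins", value, true, ctx) ~ $addToSet (skips duplicates)
        clients.update(byId, new BasicDBObject("$addToSet",
                new BasicDBObject("webOrigins", "https://example.com")));

        // pullItemFromList(entity, "webOrigins", value, ctx) ~ $pull
        clients.update(byId, new BasicDBObject("$pull",
                new BasicDBObject("webOrigins", "https://example.com")));

        mongo.close();
    }
}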

View file

@ -1,417 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.models.mongo.keycloak.adapters;
import org.keycloak.connections.mongo.api.context.MongoStoreInvocationContext;
import org.keycloak.models.ClientTemplateModel;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.ModelDuplicateException;
import org.keycloak.models.ProtocolMapperModel;
import org.keycloak.models.RealmModel;
import org.keycloak.models.RoleModel;
import org.keycloak.models.mongo.keycloak.entities.MongoClientTemplateEntity;
import org.keycloak.models.mongo.keycloak.entities.MongoRoleEntity;
import org.keycloak.models.mongo.keycloak.entities.ProtocolMapperEntity;
import org.keycloak.models.mongo.utils.MongoModelUtils;
import org.keycloak.models.utils.KeycloakModelUtils;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
*/
public class ClientTemplateAdapter extends AbstractMongoAdapter<MongoClientTemplateEntity> implements ClientTemplateModel {
protected final MongoClientTemplateEntity clientTemplateEntity;
private final RealmModel realm;
protected KeycloakSession session;
public ClientTemplateAdapter(KeycloakSession session, RealmModel realm, MongoClientTemplateEntity clientEntity, MongoStoreInvocationContext invContext) {
super(invContext);
this.session = session;
this.realm = realm;
this.clientTemplateEntity = clientEntity;
}
@Override
public MongoClientTemplateEntity getMongoEntity() {
return clientTemplateEntity;
}
@Override
public RealmModel getRealm() {
return realm;
}
@Override
public String getId() {
return getMongoEntity().getId();
}
@Override
public String getName() {
return getMongoEntity().getName();
}
@Override
public void setName(String name) {
getMongoEntity().setName(name);
updateMongoEntity();
}
@Override
public String getDescription() { return getMongoEntity().getDescription(); }
@Override
public String getProtocol() {
return getMongoEntity().getProtocol();
}
@Override
public void setProtocol(String protocol) {
getMongoEntity().setProtocol(protocol);
updateMongoEntity();
}
@Override
public void setDescription(String description) {
getMongoEntity().setDescription(description);
updateMongoEntity();
}
@Override
public Set<ProtocolMapperModel> getProtocolMappers() {
Set<ProtocolMapperModel> result = new HashSet<ProtocolMapperModel>();
for (ProtocolMapperEntity entity : getMongoEntity().getProtocolMappers()) {
ProtocolMapperModel mapping = new ProtocolMapperModel();
mapping.setId(entity.getId());
mapping.setName(entity.getName());
mapping.setProtocol(entity.getProtocol());
mapping.setProtocolMapper(entity.getProtocolMapper());
mapping.setConsentRequired(entity.isConsentRequired());
mapping.setConsentText(entity.getConsentText());
Map<String, String> config = new HashMap<String, String>();
if (entity.getConfig() != null) {
config.putAll(entity.getConfig());
}
mapping.setConfig(config);
result.add(mapping);
}
return result;
}
@Override
public ProtocolMapperModel addProtocolMapper(ProtocolMapperModel model) {
if (getProtocolMapperByName(model.getProtocol(), model.getName()) != null) {
throw new ModelDuplicateException("Protocol mapper name must be unique per protocol");
}
ProtocolMapperEntity entity = new ProtocolMapperEntity();
String id = model.getId() != null ? model.getId() : KeycloakModelUtils.generateId();
entity.setId(id);
entity.setProtocol(model.getProtocol());
entity.setName(model.getName());
entity.setProtocolMapper(model.getProtocolMapper());
entity.setConfig(model.getConfig());
entity.setConsentRequired(model.isConsentRequired());
entity.setConsentText(model.getConsentText());
getMongoEntity().getProtocolMappers().add(entity);
updateMongoEntity();
return entityToModel(entity);
}
@Override
public void removeProtocolMapper(ProtocolMapperModel mapping) {
for (ProtocolMapperEntity entity : getMongoEntity().getProtocolMappers()) {
if (entity.getId().equals(mapping.getId())) {
session.users().preRemove(mapping);
getMongoEntity().getProtocolMappers().remove(entity);
updateMongoEntity();
break;
}
}
}
protected ProtocolMapperEntity getProtocolMapperyEntityById(String id) {
for (ProtocolMapperEntity entity : getMongoEntity().getProtocolMappers()) {
if (entity.getId().equals(id)) {
return entity;
}
}
return null;
}
protected ProtocolMapperEntity getProtocolMapperEntityByName(String protocol, String name) {
for (ProtocolMapperEntity entity : getMongoEntity().getProtocolMappers()) {
if (entity.getProtocol().equals(protocol) && entity.getName().equals(name)) {
return entity;
}
}
return null;
}
@Override
public void updateProtocolMapper(ProtocolMapperModel mapping) {
ProtocolMapperEntity entity = getProtocolMapperyEntityById(mapping.getId());
entity.setProtocolMapper(mapping.getProtocolMapper());
entity.setConsentRequired(mapping.isConsentRequired());
entity.setConsentText(mapping.getConsentText());
if (entity.getConfig() != null) {
entity.getConfig().clear();
entity.getConfig().putAll(mapping.getConfig());
} else {
entity.setConfig(mapping.getConfig());
}
updateMongoEntity();
}
@Override
public ProtocolMapperModel getProtocolMapperById(String id) {
ProtocolMapperEntity entity = getProtocolMapperyEntityById(id);
if (entity == null) return null;
return entityToModel(entity);
}
@Override
public ProtocolMapperModel getProtocolMapperByName(String protocol, String name) {
ProtocolMapperEntity entity = getProtocolMapperEntityByName(protocol, name);
if (entity == null) return null;
return entityToModel(entity);
}
protected ProtocolMapperModel entityToModel(ProtocolMapperEntity entity) {
ProtocolMapperModel mapping = new ProtocolMapperModel();
mapping.setId(entity.getId());
mapping.setName(entity.getName());
mapping.setProtocol(entity.getProtocol());
mapping.setProtocolMapper(entity.getProtocolMapper());
mapping.setConsentRequired(entity.isConsentRequired());
mapping.setConsentText(entity.getConsentText());
Map<String, String> config = new HashMap<String, String>();
if (entity.getConfig() != null) config.putAll(entity.getConfig());
mapping.setConfig(config);
return mapping;
}
@Override
public boolean isFullScopeAllowed() {
return getMongoEntity().isFullScopeAllowed();
}
@Override
public void setFullScopeAllowed(boolean value) {
getMongoEntity().setFullScopeAllowed(value);
updateMongoEntity();
}
@Override
public Set<RoleModel> getScopeMappings() {
Set<RoleModel> result = new HashSet<RoleModel>();
List<MongoRoleEntity> roles = MongoModelUtils.getAllScopesOfTemplate(this, invocationContext);
for (MongoRoleEntity role : roles) {
if (realm.getId().equals(role.getRealmId())) {
result.add(new RoleAdapter(session, realm, role, realm, invocationContext));
} else {
// Likely a client (application) role, but the owning client is not loaded at this point
result.add(new RoleAdapter(session, realm, role, invocationContext));
}
}
return result;
}
@Override
public Set<RoleModel> getRealmScopeMappings() {
Set<RoleModel> allScopes = getScopeMappings();
// Filter to retrieve just realm roles. TODO: Maybe avoid this programmatic filter, e.g. by keeping separate fields for realmRoles and appRoles on the user.
Set<RoleModel> realmRoles = new HashSet<RoleModel>();
for (RoleModel role : allScopes) {
MongoRoleEntity roleEntity = ((RoleAdapter) role).getRole();
if (realm.getId().equals(roleEntity.getRealmId())) {
realmRoles.add(role);
}
}
return realmRoles;
}
@Override
public void addScopeMapping(RoleModel role) {
getMongoStore().pushItemToList(this.getMongoEntity(), "scopeIds", role.getId(), true, invocationContext);
}
@Override
public void deleteScopeMapping(RoleModel role) {
getMongoStore().pullItemFromList(this.getMongoEntity(), "scopeIds", role.getId(), invocationContext);
}
@Override
public boolean hasScope(RoleModel role) {
if (isFullScopeAllowed()) return true;
Set<RoleModel> roles = getScopeMappings();
if (roles.contains(role)) return true;
for (RoleModel mapping : roles) {
if (mapping.hasRole(role)) return true;
}
return false;
}
@Override
public boolean isPublicClient() {
return getMongoEntity().isPublicClient();
}
@Override
public void setPublicClient(boolean flag) {
getMongoEntity().setPublicClient(flag);
updateMongoEntity();
}
@Override
public boolean isFrontchannelLogout() {
return getMongoEntity().isFrontchannelLogout();
}
@Override
public void setFrontchannelLogout(boolean flag) {
getMongoEntity().setFrontchannelLogout(flag);
updateMongoEntity();
}
@Override
public void setAttribute(String name, String value) {
getMongoEntity().getAttributes().put(name, value);
updateMongoEntity();
}
@Override
public void removeAttribute(String name) {
getMongoEntity().getAttributes().remove(name);
updateMongoEntity();
}
@Override
public String getAttribute(String name) {
return getMongoEntity().getAttributes().get(name);
}
@Override
public Map<String, String> getAttributes() {
Map<String, String> copy = new HashMap<String, String>();
copy.putAll(getMongoEntity().getAttributes());
return copy;
}
@Override
public boolean isBearerOnly() {
return getMongoEntity().isBearerOnly();
}
@Override
public void setBearerOnly(boolean only) {
getMongoEntity().setBearerOnly(only);
updateMongoEntity();
}
@Override
public boolean isConsentRequired() {
return getMongoEntity().isConsentRequired();
}
@Override
public void setConsentRequired(boolean consentRequired) {
getMongoEntity().setConsentRequired(consentRequired);
updateMongoEntity();
}
@Override
public boolean isStandardFlowEnabled() {
return getMongoEntity().isStandardFlowEnabled();
}
@Override
public void setStandardFlowEnabled(boolean standardFlowEnabled) {
getMongoEntity().setStandardFlowEnabled(standardFlowEnabled);
updateMongoEntity();
}
@Override
public boolean isImplicitFlowEnabled() {
return getMongoEntity().isImplicitFlowEnabled();
}
@Override
public void setImplicitFlowEnabled(boolean implicitFlowEnabled) {
getMongoEntity().setImplicitFlowEnabled(implicitFlowEnabled);
updateMongoEntity();
}
@Override
public boolean isDirectAccessGrantsEnabled() {
return getMongoEntity().isDirectAccessGrantsEnabled();
}
@Override
public void setDirectAccessGrantsEnabled(boolean directAccessGrantsEnabled) {
getMongoEntity().setDirectAccessGrantsEnabled(directAccessGrantsEnabled);
updateMongoEntity();
}
@Override
public boolean isServiceAccountsEnabled() {
return getMongoEntity().isServiceAccountsEnabled();
}
@Override
public void setServiceAccountsEnabled(boolean serviceAccountsEnabled) {
getMongoEntity().setServiceAccountsEnabled(serviceAccountsEnabled);
updateMongoEntity();
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || !(o instanceof ClientTemplateModel)) return false;
ClientTemplateModel that = (ClientTemplateModel) o;
return that.getId().equals(getId());
}
@Override
public int hashCode() {
return getId().hashCode();
}
}
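
ClientAdapter and ClientTemplateAdapter share the same protocol-mapper handling: models are copied field by field into ProtocolMapperEntity instances, an id is generated when missing, and duplicates per protocol+name are rejected. A short usage sketch against the interface implemented above (the mapper name, provider id and config key are invented for illustration):

import java.util.HashMap;
import java.util.Map;

import org.keycloak.models.ClientTemplateModel;
import org.keycloak.models.ProtocolMapperModel;

// Usage sketch for the protocol-mapper API implemented above.
// The mapper name, provider id and config key are invented for illustration.
public class ProtocolMapperUsageSketch {

    public static ProtocolMapperModel addExampleMapper(ClientTemplateModel template) {
        ProtocolMapperModel model = new ProtocolMapperModel();
        model.setName("example-mapper");
        model.setProtocol("openid-connect");
        model.setProtocolMapper("oidc-hardcoded-claim-mapper"); // hypothetical provider id
        model.setConsentRequired(false);

        Map<String, String> config = new HashMap<>();
        config.put("claim.name", "example");
        model.setConfig(config);

        // addProtocolMapper() generates an id when none is set and persists the entity;
        // a second mapper with the same protocol and name throws ModelDuplicateException.
        return template.addProtocolMapper(model);
    }
}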

View file

@ -1,267 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.models.mongo.keycloak.adapters;
import com.mongodb.DBObject;
import com.mongodb.QueryBuilder;
import org.keycloak.connections.mongo.api.context.MongoStoreInvocationContext;
import org.keycloak.models.ClientModel;
import org.keycloak.models.GroupModel;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.ModelException;
import org.keycloak.models.RealmModel;
import org.keycloak.models.RoleModel;
import org.keycloak.models.mongo.keycloak.entities.MongoGroupEntity;
import org.keycloak.models.utils.KeycloakModelUtils;
import org.keycloak.models.utils.RoleUtils;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
*
* @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
*/
public class GroupAdapter extends AbstractMongoAdapter<MongoGroupEntity> implements GroupModel {
private final MongoGroupEntity group;
private RealmModel realm;
private KeycloakSession session;
public GroupAdapter(KeycloakSession session, RealmModel realm, MongoGroupEntity group, MongoStoreInvocationContext invContext) {
super(invContext);
this.group = group;
this.realm = realm;
this.session = session;
}
@Override
public String getId() {
return group.getId();
}
@Override
public String getName() {
return group.getName();
}
@Override
public void setName(String name) {
group.setName(name);
updateGroup();
}
protected void updateGroup() {
super.updateMongoEntity();
}
@Override
public MongoGroupEntity getMongoEntity() {
return group;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || !(o instanceof GroupModel)) return false;
GroupModel that = (GroupModel) o;
return that.getId().equals(getId());
}
@Override
public int hashCode() {
return getId().hashCode();
}
@Override
public void setSingleAttribute(String name, String value) {
if (group.getAttributes() == null) {
group.setAttributes(new HashMap<String, List<String>>());
}
List<String> attrValues = new ArrayList<>();
attrValues.add(value);
group.getAttributes().put(name, attrValues);
updateGroup();
}
@Override
public void setAttribute(String name, List<String> values) {
if (group.getAttributes() == null) {
group.setAttributes(new HashMap<String, List<String>>());
}
group.getAttributes().put(name, values);
updateGroup();
}
@Override
public void removeAttribute(String name) {
if (group.getAttributes() == null) return;
group.getAttributes().remove(name);
updateGroup();
}
@Override
public String getFirstAttribute(String name) {
if (group.getAttributes()==null) return null;
List<String> attrValues = group.getAttributes().get(name);
return (attrValues==null || attrValues.isEmpty()) ? null : attrValues.get(0);
}
@Override
public List<String> getAttribute(String name) {
if (group.getAttributes()==null) return Collections.<String>emptyList();
List<String> attrValues = group.getAttributes().get(name);
return (attrValues == null) ? Collections.<String>emptyList() : Collections.unmodifiableList(attrValues);
}
@Override
public Map<String, List<String>> getAttributes() {
return group.getAttributes()==null ? Collections.<String, List<String>>emptyMap() : Collections.unmodifiableMap((Map) group.getAttributes());
}
@Override
public boolean hasRole(RoleModel role) {
Set<RoleModel> roles = getRoleMappings();
return RoleUtils.hasRole(roles, role);
}
@Override
public void grantRole(RoleModel role) {
getMongoStore().pushItemToList(group, "roleIds", role.getId(), true, invocationContext);
}
@Override
public Set<RoleModel> getRoleMappings() {
if (group.getRoleIds() == null || group.getRoleIds().isEmpty()) return Collections.EMPTY_SET;
Set<RoleModel> roles = new HashSet<>();
for (String id : group.getRoleIds()) {
RoleModel roleById = realm.getRoleById(id);
if (roleById == null) {
throw new ModelException("role does not exist in group role mappings");
}
roles.add(roleById);
}
return roles;
}
@Override
public Set<RoleModel> getRealmRoleMappings() {
Set<RoleModel> allRoles = getRoleMappings();
// Filter to retrieve just realm roles
Set<RoleModel> realmRoles = new HashSet<RoleModel>();
for (RoleModel role : allRoles) {
if (role.getContainer() instanceof RealmModel) {
realmRoles.add(role);
}
}
return realmRoles;
}
@Override
public void deleteRoleMapping(RoleModel role) {
if (group == null || role == null) return;
getMongoStore().pullItemFromList(group, "roleIds", role.getId(), invocationContext);
}
@Override
public Set<RoleModel> getClientRoleMappings(ClientModel app) {
Set<RoleModel> result = new HashSet<RoleModel>();
Set<RoleModel> roles = getRoleMappings();
for (RoleModel role : roles) {
if (app.equals(role.getContainer())) {
result.add(role);
}
}
return result;
}
@Override
public GroupModel getParent() {
if (group.getParentId() == null) return null;
return realm.getGroupById(group.getParentId());
}
@Override
public String getParentId() {
return group.getParentId();
}
@Override
public Set<GroupModel> getSubGroups() {
DBObject query = new QueryBuilder()
.and("realmId").is(realm.getId())
.and("parentId").is(getId())
.get();
List<MongoGroupEntity> groups = getMongoStore().loadEntities(MongoGroupEntity.class, query, invocationContext);
Set<GroupModel> subGroups = new HashSet<>();
if (groups == null) return subGroups;
for (MongoGroupEntity group : groups) {
subGroups.add(realm.getGroupById(group.getId()));
}
return subGroups;
}
@Override
public void setParent(GroupModel parent) {
if (parent == null) group.setParentId(null);
else if (parent.getId().equals(getId())) {
return;
}
else {
group.setParentId(parent.getId());
}
updateGroup();
}
@Override
public void addChild(GroupModel subGroup) {
if (subGroup.getId().equals(getId())) {
return;
}
subGroup.setParent(this);
updateGroup();
}
@Override
public void removeChild(GroupModel subGroup) {
if (subGroup.getId().equals(getId())) {
return;
}
subGroup.setParent(null);
updateGroup();
}
}
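
getSubGroups() above is the one place where GroupAdapter issues its own query instead of going through the realm provider; the QueryBuilder call chain just produces a two-field filter document. A small sketch (placeholder ids):

import com.mongodb.DBObject;
import com.mongodb.QueryBuilder;

// Sketch: the filter getSubGroups() builds above is just a two-field document.
public class SubGroupQuerySketch {
    public static void main(String[] args) {
        DBObject query = new QueryBuilder()
                .and("realmId").is("my-realm-id")       // placeholder realm id
                .and("parentId").is("parent-group-id")  // placeholder group id
                .get();

        // Prints something like: { "realmId" : "my-realm-id" , "parentId" : "parent-group-id" }
        System.out.println(query);
    }
}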

View file

@ -1,55 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.models.mongo.keycloak.adapters;
import org.keycloak.connections.mongo.api.context.MongoStoreInvocationContext;
import org.keycloak.migration.MigrationModel;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.mongo.keycloak.entities.MongoMigrationModelEntity;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class MigrationModelAdapter extends AbstractMongoAdapter<MongoMigrationModelEntity> implements MigrationModel {
protected final MongoMigrationModelEntity entity;
public MigrationModelAdapter(KeycloakSession session, MongoMigrationModelEntity entity, MongoStoreInvocationContext invContext) {
super(invContext);
this.entity = entity;
}
@Override
public MongoMigrationModelEntity getMongoEntity() {
return entity;
}
@Override
public String getStoredVersion() {
return getMongoEntity().getVersion();
}
@Override
public void setStoredVersion(String version) {
getMongoEntity().setVersion(version);
updateMongoEntity();
}
}

View file

@ -1,473 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.models.mongo.keycloak.adapters;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;
import com.mongodb.QueryBuilder;
import org.keycloak.connections.mongo.api.MongoStore;
import org.keycloak.connections.mongo.api.context.MongoStoreInvocationContext;
import org.keycloak.migration.MigrationModel;
import org.keycloak.models.ClientModel;
import org.keycloak.models.ClientTemplateModel;
import org.keycloak.models.GroupModel;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.RealmModel;
import org.keycloak.models.RealmProvider;
import org.keycloak.models.RoleContainerModel;
import org.keycloak.models.RoleModel;
import org.keycloak.models.mongo.keycloak.entities.MongoClientEntity;
import org.keycloak.models.mongo.keycloak.entities.MongoClientTemplateEntity;
import org.keycloak.models.mongo.keycloak.entities.MongoGroupEntity;
import org.keycloak.models.mongo.keycloak.entities.MongoMigrationModelEntity;
import org.keycloak.models.mongo.keycloak.entities.MongoRealmEntity;
import org.keycloak.models.mongo.keycloak.entities.MongoRoleEntity;
import org.keycloak.models.utils.KeycloakModelUtils;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class MongoRealmProvider implements RealmProvider {
private final MongoStoreInvocationContext invocationContext;
private final KeycloakSession session;
public MongoRealmProvider(KeycloakSession session, MongoStoreInvocationContext invocationContext) {
this.session = session;
this.invocationContext = invocationContext;
}
@Override
public void close() {
// TODO
}
@Override
public MigrationModel getMigrationModel() {
MongoMigrationModelEntity entity = getMongoStore().loadEntity(MongoMigrationModelEntity.class, MongoMigrationModelEntity.MIGRATION_MODEL_ID, invocationContext);
if (entity == null) {
entity = new MongoMigrationModelEntity();
getMongoStore().insertEntity(entity, invocationContext);
}
return new MigrationModelAdapter(session, entity, invocationContext);
}
@Override
public RealmModel createRealm(String name) {
return createRealm(KeycloakModelUtils.generateId(), name);
}
@Override
public RealmModel createRealm(String id, String name) {
MongoRealmEntity newRealm = new MongoRealmEntity();
newRealm.setId(id);
newRealm.setName(name);
getMongoStore().insertEntity(newRealm, invocationContext);
final RealmModel model = new RealmAdapter(session, newRealm, invocationContext);
session.getKeycloakSessionFactory().publish(new RealmModel.RealmCreationEvent() {
@Override
public RealmModel getCreatedRealm() {
return model;
}
});
return model;
}
@Override
public RealmModel getRealm(String id) {
MongoRealmEntity realmEntity = getMongoStore().loadEntity(MongoRealmEntity.class, id, invocationContext);
return realmEntity != null ? new RealmAdapter(session, realmEntity, invocationContext) : null;
}
@Override
public List<RealmModel> getRealms() {
DBObject query = new BasicDBObject();
List<MongoRealmEntity> realms = getMongoStore().loadEntities(MongoRealmEntity.class, query, invocationContext);
List<RealmModel> results = new ArrayList<RealmModel>();
for (MongoRealmEntity realmEntity : realms) {
RealmModel realm = session.realms().getRealm(realmEntity.getId());
if (realm != null) results.add(realm);
}
return results;
}
@Override
public RealmModel getRealmByName(String name) {
DBObject query = new QueryBuilder()
.and("name").is(name)
.get();
MongoRealmEntity realm = getMongoStore().loadSingleEntity(MongoRealmEntity.class, query, invocationContext);
if (realm == null) return null;
return session.realms().getRealm(realm.getId());
}
@Override
public boolean removeRealm(String id) {
final RealmModel realm = getRealm(id);
if (realm == null) return false;
session.users().preRemove(realm);
boolean removed = getMongoStore().removeEntity(MongoRealmEntity.class, id, invocationContext);
if (removed) {
session.getKeycloakSessionFactory().publish(new RealmModel.RealmRemovedEvent() {
@Override
public RealmModel getRealm() {
return realm;
}
@Override
public KeycloakSession getKeycloakSession() {
return session;
}
});
}
return removed;
}
protected MongoStore getMongoStore() {
return invocationContext.getMongoStore();
}
@Override
public RoleModel getRoleById(String id, RealmModel realm) {
MongoRoleEntity role = getMongoStore().loadEntity(MongoRoleEntity.class, id, invocationContext);
if (role == null) return null;
if (role.getRealmId() != null && !role.getRealmId().equals(realm.getId())) return null;
if (role.getClientId() != null && realm.getClientById(role.getClientId()) == null) return null;
return new RoleAdapter(session, realm, role, null, invocationContext);
}
@Override
public GroupModel getGroupById(String id, RealmModel realm) {
MongoGroupEntity group = getMongoStore().loadEntity(MongoGroupEntity.class, id, invocationContext);
if (group == null) return null;
if (group.getRealmId() != null && !group.getRealmId().equals(realm.getId())) return null;
return new GroupAdapter(session, realm, group, invocationContext);
}
@Override
public void moveGroup(RealmModel realm, GroupModel group, GroupModel toParent) {
if (toParent != null && group.getId().equals(toParent.getId())) {
return;
}
if (group.getParentId() != null) {
group.getParent().removeChild(group);
}
group.setParent(toParent);
if (toParent != null) toParent.addChild(group);
else session.realms().addTopLevelGroup(realm, group);
}
@Override
public List<GroupModel> getGroups(RealmModel realm) {
DBObject query = new QueryBuilder()
.and("realmId").is(realm.getId())
.get();
List<MongoGroupEntity> groups = getMongoStore().loadEntities(MongoGroupEntity.class, query, invocationContext);
if (groups == null) return Collections.EMPTY_LIST;
List<GroupModel> result = new LinkedList<>();
for (MongoGroupEntity group : groups) {
result.add(getGroupById(group.getId(), realm));
}
return Collections.unmodifiableList(result);
}
@Override
public List<GroupModel> getTopLevelGroups(RealmModel realm) {
DBObject query = new QueryBuilder()
.and("realmId").is(realm.getId())
.and("parentId").is(null)
.get();
List<MongoGroupEntity> groups = getMongoStore().loadEntities(MongoGroupEntity.class, query, invocationContext);
if (groups == null) return Collections.EMPTY_LIST;
List<GroupModel> result = new LinkedList<>();
for (MongoGroupEntity group : groups) {
result.add(getGroupById(group.getId(), realm));
}
return Collections.unmodifiableList(result);
}
@Override
public boolean removeGroup(RealmModel realm, GroupModel group) {
session.users().preRemove(realm, group);
realm.removeDefaultGroup(group);
for (GroupModel subGroup : group.getSubGroups()) {
removeGroup(realm, subGroup);
}
moveGroup(realm, group, null);
return getMongoStore().removeEntity(MongoGroupEntity.class, group.getId(), invocationContext);
}
@Override
public GroupModel createGroup(RealmModel realm, String name) {
String id = KeycloakModelUtils.generateId();
return createGroup(realm, id, name);
}
@Override
public GroupModel createGroup(RealmModel realm, String id, String name) {
if (id == null) id = KeycloakModelUtils.generateId();
MongoGroupEntity group = new MongoGroupEntity();
group.setId(id);
group.setName(name);
group.setRealmId(realm.getId());
getMongoStore().insertEntity(group, invocationContext);
return new GroupAdapter(session, realm, group, invocationContext);
}
@Override
public void addTopLevelGroup(RealmModel realm, GroupModel subGroup) {
subGroup.setParent(null);
}
@Override
public ClientModel getClientById(String id, RealmModel realm) {
MongoClientEntity appData = getMongoStore().loadEntity(MongoClientEntity.class, id, invocationContext);
// Check that the application belongs to this realm
if (appData == null || !realm.getId().equals(appData.getRealmId())) {
return null;
}
return new ClientAdapter(session, realm, appData, invocationContext);
}
@Override
public ClientModel addClient(RealmModel realm, String clientId) {
return addClient(realm, KeycloakModelUtils.generateId(), clientId);
}
@Override
public ClientModel addClient(RealmModel realm, String id, String clientId) {
MongoClientEntity clientEntity = new MongoClientEntity();
clientEntity.setId(id);
clientEntity.setClientId(clientId);
clientEntity.setRealmId(realm.getId());
clientEntity.setEnabled(true);
clientEntity.setStandardFlowEnabled(true);
getMongoStore().insertEntity(clientEntity, invocationContext);
if (clientId == null) {
clientEntity.setClientId(clientEntity.getId());
getMongoStore().updateEntity(clientEntity, invocationContext);
}
final ClientModel model = new ClientAdapter(session, realm, clientEntity, invocationContext);
session.getKeycloakSessionFactory().publish(new RealmModel.ClientCreationEvent() {
@Override
public ClientModel getCreatedClient() {
return model;
}
});
return model;
}
@Override
public List<ClientModel> getClients(RealmModel realm) {
DBObject query = new QueryBuilder()
.and("realmId").is(realm.getId())
.get();
List<MongoClientEntity> clientEntities = getMongoStore().loadEntities(MongoClientEntity.class, query, invocationContext);
if (clientEntities.isEmpty()) return Collections.EMPTY_LIST;
List<ClientModel> result = new ArrayList<ClientModel>();
for (MongoClientEntity clientEntity : clientEntities) {
result.add(session.realms().getClientById(clientEntity.getId(), realm));
}
return Collections.unmodifiableList(result);
}
@Override
public RoleModel addRealmRole(RealmModel realm, String name) {
return addRealmRole(realm, KeycloakModelUtils.generateId(), name);
}
@Override
public RoleModel addRealmRole(RealmModel realm, String id, String name) {
MongoRoleEntity roleEntity = new MongoRoleEntity();
roleEntity.setId(id);
roleEntity.setName(name);
roleEntity.setRealmId(realm.getId());
getMongoStore().insertEntity(roleEntity, invocationContext);
return new RoleAdapter(session, realm, roleEntity, realm, invocationContext);
}
@Override
public Set<RoleModel> getRealmRoles(RealmModel realm) {
DBObject query = new QueryBuilder()
.and("realmId").is(realm.getId())
.get();
List<MongoRoleEntity> roles = getMongoStore().loadEntities(MongoRoleEntity.class, query, invocationContext);
if (roles == null) return Collections.EMPTY_SET;
Set<RoleModel> result = new HashSet<RoleModel>();
for (MongoRoleEntity role : roles) {
result.add(session.realms().getRoleById(role.getId(), realm));
}
return Collections.unmodifiableSet(result);
}
@Override
public Set<RoleModel> getClientRoles(RealmModel realm, ClientModel client) {
DBObject query = new QueryBuilder()
.and("clientId").is(client.getId())
.get();
List<MongoRoleEntity> roles = getMongoStore().loadEntities(MongoRoleEntity.class, query, invocationContext);
Set<RoleModel> result = new HashSet<RoleModel>();
for (MongoRoleEntity role : roles) {
result.add(session.realms().getRoleById(role.getId(), realm));
}
return result;
}
@Override
public RoleModel getRealmRole(RealmModel realm, String name) {
DBObject query = new QueryBuilder()
.and("name").is(name)
.and("realmId").is(realm.getId())
.get();
MongoRoleEntity role = getMongoStore().loadSingleEntity(MongoRoleEntity.class, query, invocationContext);
if (role == null) {
return null;
} else {
return session.realms().getRoleById(role.getId(), realm);
}
}
@Override
public RoleModel getClientRole(RealmModel realm, ClientModel client, String name) {
DBObject query = new QueryBuilder()
.and("name").is(name)
.and("clientId").is(client.getId())
.get();
MongoRoleEntity role = getMongoStore().loadSingleEntity(MongoRoleEntity.class, query, invocationContext);
if (role == null) {
return null;
} else {
return session.realms().getRoleById(role.getId(), realm);
}
}
@Override
public RoleModel addClientRole(RealmModel realm, ClientModel client, String name) {
return addClientRole(realm, client, KeycloakModelUtils.generateId(), name);
}
@Override
public RoleModel addClientRole(RealmModel realm, ClientModel client, String id, String name) {
MongoRoleEntity roleEntity = new MongoRoleEntity();
roleEntity.setId(id);
roleEntity.setName(name);
roleEntity.setClientId(client.getId());
getMongoStore().insertEntity(roleEntity, invocationContext);
return new RoleAdapter(session, realm, roleEntity, client, invocationContext);
}
@Override
public boolean removeRole(RealmModel realm, RoleModel role) {
session.users().preRemove(realm, role);
RoleContainerModel container = role.getContainer();
if (container.getDefaultRoles().contains(role.getName())) {
container.removeDefaultRoles(role.getName());
}
return getMongoStore().removeEntity(MongoRoleEntity.class, role.getId(), invocationContext);
}
@Override
public boolean removeClient(String id, RealmModel realm) {
if (id == null) return false;
final ClientModel client = getClientById(id, realm);
if (client == null) return false;
session.users().preRemove(realm, client);
boolean removed = getMongoStore().removeEntity(MongoClientEntity.class, id, invocationContext);
if (removed) {
session.getKeycloakSessionFactory().publish(new RealmModel.ClientRemovedEvent() {
@Override
public ClientModel getClient() {
return client;
}
@Override
public KeycloakSession getKeycloakSession() {
return session;
}
});
}
return removed;
}
@Override
public ClientModel getClientByClientId(String clientId, RealmModel realm) {
DBObject query = new QueryBuilder()
.and("realmId").is(realm.getId())
.and("clientId").is(clientId)
.get();
MongoClientEntity appEntity = getMongoStore().loadSingleEntity(MongoClientEntity.class, query, invocationContext);
if (appEntity == null) return null;
return session.realms().getClientById(appEntity.getId(), realm);
}
@Override
public ClientTemplateModel getClientTemplateById(String id, RealmModel realm) {
MongoClientTemplateEntity appData = getMongoStore().loadEntity(MongoClientTemplateEntity.class, id, invocationContext);
// Check that the application belongs to this realm
if (appData == null || !realm.getId().equals(appData.getRealmId())) {
return null;
}
return new ClientTemplateAdapter(session, realm, appData, invocationContext);
}
}
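
Nothing outside this module referenced MongoRealmProvider directly; callers always went through KeycloakSession.realms(), which returns whichever RealmProvider the realm SPI resolves to. A sketch of that calling pattern (hypothetical realm and client names):

import org.keycloak.models.ClientModel;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.RealmModel;

// Sketch: callers reached this provider only through session.realms(),
// which returns whichever RealmProvider the realm SPI has selected.
public class RealmProviderUsageSketch {

    public static ClientModel createDemoRealmAndClient(KeycloakSession session) {
        RealmModel realm = session.realms().createRealm("demo");   // hypothetical realm name
        return session.realms().addClient(realm, "my-app");        // hypothetical clientId
    }
}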

View file

@ -1,62 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.models.mongo.keycloak.adapters;
import org.jboss.logging.Logger;
import org.keycloak.Config;
import org.keycloak.connections.mongo.MongoConnectionProvider;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.KeycloakSessionFactory;
import org.keycloak.models.RealmProvider;
import org.keycloak.models.RealmProviderFactory;
/**
* KeycloakSessionFactory implementation based on MongoDB
*
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class MongoRealmProviderFactory implements RealmProviderFactory {
protected static final Logger logger = Logger.getLogger(MongoRealmProviderFactory.class);
@Override
public String getId() {
return "mongo";
}
@Override
public void init(Config.Scope config) {
}
@Override
public void postInit(KeycloakSessionFactory factory) {
}
@Override
public RealmProvider create(KeycloakSession session) {
MongoConnectionProvider connection = session.getProvider(MongoConnectionProvider.class);
return new MongoRealmProvider(session, connection.getInvocationContext());
}
@Override
public void close() {
}
}
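
The factory above is what tied the provider to the SPI id "mongo". Factories of this kind are discovered through the standard java.util.ServiceLoader mechanism, so removing the module also removes its META-INF/services registration. A sketch that lists the realm provider factories visible on the classpath (assumes the Keycloak server SPI jars are present):

import java.util.ServiceLoader;

import org.keycloak.models.RealmProviderFactory;

// Sketch: provider factories are discovered via java.util.ServiceLoader, using
// META-INF/services/org.keycloak.models.RealmProviderFactory entries on the classpath.
public class ListRealmProviderFactories {
    public static void main(String[] args) {
        for (RealmProviderFactory factory : ServiceLoader.load(RealmProviderFactory.class)) {
            // With the mongo module present this could print "mongo"; after this commit only "jpa" remains.
            System.out.println(factory.getId());
        }
    }
}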

View file

@ -1,864 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.models.mongo.keycloak.adapters;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;
import com.mongodb.QueryBuilder;
import org.keycloak.common.util.MultivaluedHashMap;
import org.keycloak.common.util.Time;
import org.keycloak.component.ComponentModel;
import org.keycloak.connections.mongo.api.MongoStore;
import org.keycloak.connections.mongo.api.context.MongoStoreInvocationContext;
import org.keycloak.credential.CredentialModel;
import org.keycloak.credential.UserCredentialStore;
import org.keycloak.models.ClientModel;
import org.keycloak.models.FederatedIdentityModel;
import org.keycloak.models.GroupModel;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.ModelDuplicateException;
import org.keycloak.models.ModelException;
import org.keycloak.models.ProtocolMapperModel;
import org.keycloak.models.RealmModel;
import org.keycloak.models.RequiredActionProviderModel;
import org.keycloak.models.RoleModel;
import org.keycloak.models.UserConsentModel;
import org.keycloak.models.UserManager;
import org.keycloak.models.UserModel;
import org.keycloak.models.UserProvider;
import org.keycloak.models.cache.CachedUserModel;
import org.keycloak.models.mongo.keycloak.entities.CredentialEntity;
import org.keycloak.models.mongo.keycloak.entities.FederatedIdentityEntity;
import org.keycloak.models.mongo.keycloak.entities.MongoUserConsentEntity;
import org.keycloak.models.mongo.keycloak.entities.MongoUserEntity;
import org.keycloak.models.mongo.keycloak.entities.UserConsentEntity;
import org.keycloak.models.utils.KeycloakModelUtils;
import org.keycloak.models.utils.UserModelDelegate;
import org.keycloak.storage.UserStorageProvider;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class MongoUserProvider implements UserProvider, UserCredentialStore {
private final MongoStoreInvocationContext invocationContext;
private final KeycloakSession session;
public MongoUserProvider(KeycloakSession session, MongoStoreInvocationContext invocationContext) {
this.session = session;
this.invocationContext = invocationContext;
}
@Override
public void close() {
}
@Override
public UserAdapter getUserById(String id, RealmModel realm) {
MongoUserEntity user = getMongoStore().loadEntity(MongoUserEntity.class, id, invocationContext);
// Check that it's a user from this realm
if (user == null || !realm.getId().equals(user.getRealmId())) {
return null;
} else {
return new UserAdapter(session, realm, user, invocationContext);
}
}
@Override
public UserModel getUserByUsername(String username, RealmModel realm) {
DBObject query = new QueryBuilder()
.and("username").is(username.toLowerCase())
.and("realmId").is(realm.getId())
.get();
MongoUserEntity user = getMongoStore().loadSingleEntity(MongoUserEntity.class, query, invocationContext);
if (user == null) {
return null;
} else {
return new UserAdapter(session, realm, user, invocationContext);
}
}
@Override
public UserModel getUserByEmail(String email, RealmModel realm) {
DBObject query = new QueryBuilder()
.and("email").is(email.toLowerCase())
.and("realmId").is(realm.getId())
.get();
List<MongoUserEntity> users = getMongoStore().loadEntities(MongoUserEntity.class, query, invocationContext);
if (users.isEmpty()) return null;
ensureEmailConstraint(users, realm);
return new UserAdapter(session, realm, users.get(0), invocationContext);
}
@Override
public List<UserModel> getGroupMembers(RealmModel realm, GroupModel group, int firstResult, int maxResults) {
QueryBuilder queryBuilder = new QueryBuilder()
.and("realmId").is(realm.getId());
queryBuilder.and("groupIds").is(group.getId());
DBObject sort = new BasicDBObject("username", 1);
List<MongoUserEntity> users = getMongoStore().loadEntities(MongoUserEntity.class, queryBuilder.get(), sort, firstResult, maxResults, invocationContext);
return convertUserEntities(realm, users);
}
protected MongoStore getMongoStore() {
return invocationContext.getMongoStore();
}
@Override
public List<UserModel> getGroupMembers(RealmModel realm, GroupModel group) {
return getGroupMembers(realm, group, -1, -1);
}
@Override
public UserModel getUserByFederatedIdentity(FederatedIdentityModel socialLink, RealmModel realm) {
DBObject query = new QueryBuilder()
.and("federatedIdentities.identityProvider").is(socialLink.getIdentityProvider())
.and("federatedIdentities.userId").is(socialLink.getUserId())
.and("realmId").is(realm.getId())
.get();
MongoUserEntity userEntity = getMongoStore().loadSingleEntity(MongoUserEntity.class, query, invocationContext);
return userEntity == null ? null : new UserAdapter(session, realm, userEntity, invocationContext);
}
@Override
public UserModel getServiceAccount(ClientModel client) {
DBObject query = new QueryBuilder()
.and("serviceAccountClientLink").is(client.getId())
.and("realmId").is(client.getRealm().getId())
.get();
MongoUserEntity userEntity = getMongoStore().loadSingleEntity(MongoUserEntity.class, query, invocationContext);
return userEntity == null ? null : new UserAdapter(session, client.getRealm(), userEntity, invocationContext);
}
protected List<UserModel> convertUserEntities(RealmModel realm, List<MongoUserEntity> userEntities) {
List<UserModel> userModels = new ArrayList<UserModel>();
for (MongoUserEntity user : userEntities) {
userModels.add(new UserAdapter(session, realm, user, invocationContext));
}
return userModels;
}
@Override
public List<UserModel> getUsers(RealmModel realm) {
return getUsers(realm, false);
}
@Override
public List<UserModel> getUsers(RealmModel realm, int firstResult, int maxResults) {
return getUsers(realm, firstResult, maxResults, false);
}
@Override
public List<UserModel> getUsers(RealmModel realm, boolean includeServiceAccounts) {
return getUsers(realm, -1, -1, includeServiceAccounts);
}
@Override
public int getUsersCount(RealmModel realm) {
DBObject query = new QueryBuilder()
.and("realmId").is(realm.getId())
.get();
return getMongoStore().countEntities(MongoUserEntity.class, query, invocationContext);
}
@Override
public List<UserModel> getUsers(RealmModel realm, int firstResult, int maxResults, boolean includeServiceAccounts) {
QueryBuilder queryBuilder = new QueryBuilder()
.and("realmId").is(realm.getId());
if (!includeServiceAccounts) {
queryBuilder = queryBuilder.and("serviceAccountClientLink").is(null);
}
DBObject query = queryBuilder.get();
DBObject sort = new BasicDBObject("username", 1);
List<MongoUserEntity> users = getMongoStore().loadEntities(MongoUserEntity.class, query, sort, firstResult, maxResults, invocationContext);
return convertUserEntities(realm, users);
}
@Override
public List<UserModel> searchForUser(String search, RealmModel realm) {
return searchForUser(search, realm, -1, -1);
}
@Override
public List<UserModel>
searchForUser(String search, RealmModel realm, int firstResult, int maxResults) {
search = search.trim();
Pattern caseInsensitivePattern = Pattern.compile("(?i:" + search + ")");
QueryBuilder nameBuilder;
int spaceInd = search.lastIndexOf(" ");
// Case when we have a search string like "ohn Bow". Then firstName must end with "ohn" AND lastName must start with "Bow" (everything case-insensitive)
if (spaceInd != -1) {
String firstName = search.substring(0, spaceInd);
String lastName = search.substring(spaceInd + 1);
Pattern firstNamePattern = Pattern.compile("(?i:" + firstName + "$)");
Pattern lastNamePattern = Pattern.compile("(?i:^" + lastName + ")");
nameBuilder = new QueryBuilder().and(
new QueryBuilder().put("firstName").regex(firstNamePattern).get(),
new QueryBuilder().put("lastName").regex(lastNamePattern).get()
);
} else {
// Case when we have a search string without spaces like "foo". The firstName OR lastName could be "foo" (everything case-insensitive)
nameBuilder = new QueryBuilder().or(
new QueryBuilder().put("firstName").regex(caseInsensitivePattern).get(),
new QueryBuilder().put("lastName").regex(caseInsensitivePattern).get()
);
}
QueryBuilder builder = new QueryBuilder().and(
new QueryBuilder().and("realmId").is(realm.getId()).get(),
new QueryBuilder().and("serviceAccountClientLink").is(null).get(),
new QueryBuilder().or(
new QueryBuilder().put("username").regex(caseInsensitivePattern).get(),
new QueryBuilder().put("email").regex(caseInsensitivePattern).get(),
nameBuilder.get()
).get()
);
DBObject sort = new BasicDBObject("username", 1);
List<MongoUserEntity> users = getMongoStore().loadEntities(MongoUserEntity.class, builder.get(), sort, firstResult, maxResults, invocationContext);
return convertUserEntities(realm, users);
}
@Override
public List<UserModel> searchForUser(Map<String, String> attributes, RealmModel realm) {
return searchForUser(attributes, realm, -1, -1);
}
@Override
public List<UserModel> searchForUser(Map<String, String> attributes, RealmModel realm, int firstResult, int maxResults) {
QueryBuilder queryBuilder = new QueryBuilder()
.and("realmId").is(realm.getId());
for (Map.Entry<String, String> entry : attributes.entrySet()) {
if (entry.getKey().equalsIgnoreCase(UserModel.USERNAME)) {
queryBuilder.and(UserModel.USERNAME).regex(Pattern.compile(".*" + entry.getValue() + ".*", Pattern.CASE_INSENSITIVE));
} else if (entry.getKey().equalsIgnoreCase(UserModel.FIRST_NAME)) {
queryBuilder.and(UserModel.FIRST_NAME).regex(Pattern.compile(".*" + entry.getValue() + ".*", Pattern.CASE_INSENSITIVE));
} else if (entry.getKey().equalsIgnoreCase(UserModel.LAST_NAME)) {
queryBuilder.and(UserModel.LAST_NAME).regex(Pattern.compile(".*" + entry.getValue() + ".*", Pattern.CASE_INSENSITIVE));
} else if (entry.getKey().equalsIgnoreCase(UserModel.EMAIL)) {
queryBuilder.and(UserModel.EMAIL).regex(Pattern.compile(".*" + entry.getValue() + ".*", Pattern.CASE_INSENSITIVE));
}
}
DBObject sort = new BasicDBObject("username", 1);
List<MongoUserEntity> users = getMongoStore().loadEntities(MongoUserEntity.class, queryBuilder.get(), sort, firstResult, maxResults, invocationContext);
return convertUserEntities(realm, users);
}
@Override
public List<UserModel> searchForUserByUserAttribute(String attrName, String attrValue, RealmModel realm) {
QueryBuilder queryBuilder = new QueryBuilder()
.and("realmId").is(realm.getId());
queryBuilder.and("attributes." + attrName).is(attrValue);
List<MongoUserEntity> users = getMongoStore().loadEntities(MongoUserEntity.class, queryBuilder.get(), invocationContext);
return convertUserEntities(realm, users);
}
@Override
public Set<FederatedIdentityModel> getFederatedIdentities(UserModel userModel, RealmModel realm) {
UserAdapter user = getUserById(userModel.getId(), realm);
MongoUserEntity userEntity = user.getUser();
List<FederatedIdentityEntity> linkEntities = userEntity.getFederatedIdentities();
if (linkEntities == null) {
return Collections.EMPTY_SET;
}
Set<FederatedIdentityModel> result = new HashSet<FederatedIdentityModel>();
for (FederatedIdentityEntity federatedIdentityEntity : linkEntities) {
FederatedIdentityModel model = new FederatedIdentityModel(federatedIdentityEntity.getIdentityProvider(),
federatedIdentityEntity.getUserId(), federatedIdentityEntity.getUserName(), federatedIdentityEntity.getToken());
result.add(model);
}
return result;
}
@Override
public FederatedIdentityModel getFederatedIdentity(UserModel user, String socialProvider, RealmModel realm) {
UserAdapter mongoUser = getUserById(user.getId(), realm);
MongoUserEntity userEntity = mongoUser.getUser();
FederatedIdentityEntity federatedIdentityEntity = findFederatedIdentityLink(userEntity, socialProvider);
return federatedIdentityEntity != null ? new FederatedIdentityModel(federatedIdentityEntity.getIdentityProvider(), federatedIdentityEntity.getUserId(),
federatedIdentityEntity.getUserName(), federatedIdentityEntity.getToken()) : null;
}
@Override
public UserAdapter addUser(RealmModel realm, String id, String username, boolean addDefaultRoles, boolean addDefaultRequiredActions) {
UserAdapter userModel = addUserEntity(realm, id, username.toLowerCase());
if (addDefaultRoles) {
for (String r : realm.getDefaultRoles()) {
userModel.grantRole(realm.getRole(r));
}
for (ClientModel application : realm.getClients()) {
for (String r : application.getDefaultRoles()) {
userModel.grantRole(application.getRole(r));
}
}
for (GroupModel g : realm.getDefaultGroups()) {
userModel.joinGroup(g);
}
}
if (addDefaultRequiredActions) {
for (RequiredActionProviderModel r : realm.getRequiredActionProviders()) {
if (r.isEnabled() && r.isDefaultAction()) {
userModel.addRequiredAction(r.getAlias());
}
}
}
return userModel;
}
protected UserAdapter addUserEntity(RealmModel realm, String id, String username) {
MongoUserEntity userEntity = new MongoUserEntity();
userEntity.setId(id);
userEntity.setUsername(username);
userEntity.setCreatedTimestamp(System.currentTimeMillis());
// Compatibility with JPA model, which has user disabled by default
// userEntity.setEnabled(true);
userEntity.setRealmId(realm.getId());
getMongoStore().insertEntity(userEntity, invocationContext);
return new UserAdapter(session, realm, userEntity, invocationContext);
}
@Override
public boolean removeUser(RealmModel realm, UserModel user) {
return getMongoStore().removeEntity(MongoUserEntity.class, user.getId(), invocationContext);
}
@Override
public void addFederatedIdentity(RealmModel realm, UserModel user, FederatedIdentityModel identity) {
UserAdapter mongoUser = getUserById(user.getId(), realm);
MongoUserEntity userEntity = mongoUser.getUser();
FederatedIdentityEntity federatedIdentityEntity = new FederatedIdentityEntity();
federatedIdentityEntity.setIdentityProvider(identity.getIdentityProvider());
federatedIdentityEntity.setUserId(identity.getUserId());
federatedIdentityEntity.setUserName(identity.getUserName().toLowerCase());
federatedIdentityEntity.setToken(identity.getToken());
getMongoStore().pushItemToList(userEntity, "federatedIdentities", federatedIdentityEntity, true, invocationContext);
}
@Override
public void updateFederatedIdentity(RealmModel realm, UserModel federatedUser, FederatedIdentityModel federatedIdentityModel) {
UserAdapter mongoUser = getUserById(federatedUser.getId(), realm);
MongoUserEntity userEntity = mongoUser.getUser();
FederatedIdentityEntity federatedIdentityEntity = findFederatedIdentityLink(userEntity, federatedIdentityModel.getIdentityProvider());
//pushItemToList updates the whole federatedIdentities array in Mongo so we just need to remove this object from the Java
//List and pushItemToList will handle the DB update.
userEntity.getFederatedIdentities().remove(federatedIdentityEntity);
federatedIdentityEntity.setToken(federatedIdentityModel.getToken());
getMongoStore().pushItemToList(userEntity, "federatedIdentities", federatedIdentityEntity, true, invocationContext);
}
@Override
public boolean removeFederatedIdentity(RealmModel realm, UserModel userModel, String socialProvider) {
UserAdapter user = getUserById(userModel.getId(), realm);
MongoUserEntity userEntity = user.getUser();
FederatedIdentityEntity federatedIdentityEntity = findFederatedIdentityLink(userEntity, socialProvider);
if (federatedIdentityEntity == null) {
return false;
}
return getMongoStore().pullItemFromList(userEntity, "federatedIdentities", federatedIdentityEntity, invocationContext);
}
private FederatedIdentityEntity findFederatedIdentityLink(MongoUserEntity userEntity, String identityProvider) {
List<FederatedIdentityEntity> linkEntities = userEntity.getFederatedIdentities();
if (linkEntities == null) {
return null;
}
for (FederatedIdentityEntity federatedIdentityEntity : linkEntities) {
if (federatedIdentityEntity.getIdentityProvider().equals(identityProvider)) {
return federatedIdentityEntity;
}
}
return null;
}
@Override
public UserModel addUser(RealmModel realm, String username) {
return this.addUser(realm, null, username, true, true);
}
@Override
public void grantToAllUsers(RealmModel realm, RoleModel role) {
DBObject query = new QueryBuilder()
.and("realmId").is(realm.getId())
.get();
DBObject update = new QueryBuilder()
.and("$push").is(new BasicDBObject("roleIds", role.getId()))
.get();
getMongoStore().updateEntities(MongoUserEntity.class, query, update, invocationContext);
}
@Override
public void preRemove(RealmModel realm) {
DBObject query = new QueryBuilder()
.and("realmId").is(realm.getId())
.get();
getMongoStore().removeEntities(MongoUserEntity.class, query, true, invocationContext);
}
@Override
public void preRemove(RealmModel realm, ClientModel client) {
// Remove all role mappings and consents mapped to all roles of this client
for (RoleModel role : client.getRoles()) {
preRemove(realm, role);
}
// Finally remove all consents of this client
DBObject query = new QueryBuilder()
.and("clientId").is(client.getId())
.get();
getMongoStore().removeEntities(MongoUserConsentEntity.class, query, false, invocationContext);
}
@Override
public void preRemove(ProtocolMapperModel protocolMapper) {
// Remove this protocol mapper from all consents that have it
DBObject query = new QueryBuilder()
.and("grantedProtocolMappers").is(protocolMapper.getId())
.get();
DBObject pull = new BasicDBObject("$pull", query);
getMongoStore().updateEntities(MongoUserConsentEntity.class, query, pull, invocationContext);
}
@Override
public void preRemove(RealmModel realm, GroupModel group) {
// Remove this group from all users that are members of it
DBObject query = new QueryBuilder()
.and("groupIds").is(group.getId())
.get();
DBObject pull = new BasicDBObject("$pull", query);
getMongoStore().updateEntities(MongoUserEntity.class, query, pull, invocationContext);
}
@Override
public void preRemove(RealmModel realm, RoleModel role) {
// Remove this role from all users that have it
DBObject query = new QueryBuilder()
.and("roleIds").is(role.getId())
.get();
DBObject pull = new BasicDBObject("$pull", query);
getMongoStore().updateEntities(MongoUserEntity.class, query, pull, invocationContext);
// Remove this role from all consents that have it
query = new QueryBuilder()
.and("grantedRoles").is(role.getId())
.get();
pull = new BasicDBObject("$pull", query);
getMongoStore().updateEntities(MongoUserConsentEntity.class, query, pull, invocationContext);
}
@Override
public void addConsent(RealmModel realm, String userId, UserConsentModel consent) {
String clientId = consent.getClient().getId();
if (getConsentEntityByClientId(userId, clientId) != null) {
throw new ModelDuplicateException("Consent already exists for client [" + clientId + "] and user [" + userId + "]");
}
long currentTime = Time.currentTimeMillis();
MongoUserConsentEntity consentEntity = new MongoUserConsentEntity();
consentEntity.setUserId(userId);
consentEntity.setClientId(clientId);
consentEntity.setCreatedDate(currentTime);
consentEntity.setLastUpdatedDate(currentTime);
fillEntityFromModel(consent, consentEntity);
getMongoStore().insertEntity(consentEntity, invocationContext);
}
@Override
public UserConsentModel getConsentByClient(RealmModel realm, String userId, String clientId) {
UserConsentEntity consentEntity = getConsentEntityByClientId(userId, clientId);
return consentEntity!=null ? toConsentModel(realm, consentEntity) : null;
}
@Override
public List<UserConsentModel> getConsents(RealmModel realm, String userId) {
List<UserConsentModel> result = new ArrayList<UserConsentModel>();
DBObject query = new QueryBuilder()
.and("userId").is(userId)
.get();
List<MongoUserConsentEntity> grantedConsents = getMongoStore().loadEntities(MongoUserConsentEntity.class, query, invocationContext);
for (UserConsentEntity consentEntity : grantedConsents) {
UserConsentModel model = toConsentModel(realm, consentEntity);
result.add(model);
}
return result;
}
private MongoUserConsentEntity getConsentEntityByClientId(String userId, String clientId) {
DBObject query = new QueryBuilder()
.and("userId").is(userId)
.and("clientId").is(clientId)
.get();
return getMongoStore().loadSingleEntity(MongoUserConsentEntity.class, query, invocationContext);
}
private UserConsentModel toConsentModel(RealmModel realm, UserConsentEntity entity) {
ClientModel client = realm.getClientById(entity.getClientId());
if (client == null) {
throw new ModelException("Client with id " + entity.getClientId() + " is not available");
}
UserConsentModel model = new UserConsentModel(client);
model.setCreatedDate(entity.getCreatedDate());
model.setLastUpdatedDate(entity.getLastUpdatedDate());
for (String roleId : entity.getGrantedRoles()) {
RoleModel roleModel = realm.getRoleById(roleId);
if (roleModel != null) {
model.addGrantedRole(roleModel);
}
}
for (String protMapperId : entity.getGrantedProtocolMappers()) {
ProtocolMapperModel protocolMapper = client.getProtocolMapperById(protMapperId);
model.addGrantedProtocolMapper(protocolMapper);
}
return model;
}
// Fill roles and protocolMappers to entity
private void fillEntityFromModel(UserConsentModel consent, MongoUserConsentEntity consentEntity) {
List<String> roleIds = new LinkedList<String>();
for (RoleModel role : consent.getGrantedRoles()) {
roleIds.add(role.getId());
}
consentEntity.setGrantedRoles(roleIds);
List<String> protMapperIds = new LinkedList<String>();
for (ProtocolMapperModel protMapperModel : consent.getGrantedProtocolMappers()) {
protMapperIds.add(protMapperModel.getId());
}
consentEntity.setGrantedProtocolMappers(protMapperIds);
consentEntity.setLastUpdatedDate(Time.currentTimeMillis());
}
@Override
public void updateConsent(RealmModel realm, String userId, UserConsentModel consent) {
String clientId = consent.getClient().getId();
MongoUserConsentEntity consentEntity = getConsentEntityByClientId(userId, clientId);
if (consentEntity == null) {
throw new ModelException("Consent not found for client [" + clientId + "] and user [" + userId + "]");
} else {
fillEntityFromModel(consent, consentEntity);
getMongoStore().updateEntity(consentEntity, invocationContext);
}
}
@Override
public boolean revokeConsentForClient(RealmModel realm, String userId, String clientId) {
MongoUserConsentEntity entity = getConsentEntityByClientId(userId, clientId);
if (entity == null) {
return false;
}
return getMongoStore().removeEntity(entity, invocationContext);
}
@Override
public void preRemove(RealmModel realm, ComponentModel component) {
if (!component.getProviderType().equals(UserStorageProvider.class.getName())) return;
String providerId = component.getId();
removeImportedUsers(realm, providerId);
}
@Override
public void removeImportedUsers(RealmModel realm, String providerId) {
DBObject query = new QueryBuilder()
.and("federationLink").is(providerId)
.get();
List<MongoUserEntity> mongoUsers = getMongoStore().loadEntities(MongoUserEntity.class, query, invocationContext);
UserManager userManager = new UserManager(session);
for (MongoUserEntity userEntity : mongoUsers) {
// Doing it this way to ensure the UserRemovedEvent is triggered with proper callbacks.
UserAdapter user = new UserAdapter(session, realm, userEntity, invocationContext);
userManager.removeUser(realm, user, this);
}
}
@Override
public void unlinkUsers(RealmModel realm, String storageProviderId) {
DBObject query = new QueryBuilder()
.and("federationLink").is(storageProviderId)
.get();
List<MongoUserEntity> mongoUsers = getMongoStore().loadEntities(MongoUserEntity.class, query, invocationContext);
for (MongoUserEntity userEntity : mongoUsers) {
// Doing it this way to ensure the UserRemovedEvent is triggered with proper callbacks.
UserAdapter user = new UserAdapter(session, realm, userEntity, invocationContext);
user.setFederationLink(null);
}
}
@Override
public void updateCredential(RealmModel realm, UserModel user, CredentialModel cred) {
MongoUserEntity mongoUser = getMongoUserEntity(user);
CredentialEntity credentialEntity = getCredentialEntity(cred, mongoUser);
if (credentialEntity == null) return;
// old store may not have id set
if (credentialEntity.getId() == null) credentialEntity.setId(KeycloakModelUtils.generateId());
setValues(cred, credentialEntity);
getMongoStore().updateEntity(mongoUser, invocationContext);
}
public CredentialEntity getCredentialEntity(CredentialModel cred, MongoUserEntity mongoUser) {
CredentialEntity credentialEntity = null;
// old store may not have id set
for (CredentialEntity entity : mongoUser.getCredentials()) {
if (cred.getId() != null && cred.getId().equals(entity.getId())) {
credentialEntity = entity;
break;
} else if (cred.getType().equals(entity.getType())) {
credentialEntity = entity;
break;
}
}
return credentialEntity;
}
public MongoUserEntity getMongoUserEntity(UserModel user) {
if (user instanceof UserAdapter) {
UserAdapter adapter = (UserAdapter)user;
return adapter.getMongoEntity();
} else if (user instanceof CachedUserModel) {
UserModel delegate = ((CachedUserModel)user).getDelegateForUpdate();
return getMongoUserEntity(delegate);
} else if (user instanceof UserModelDelegate){
UserModel delegate = ((UserModelDelegate) user).getDelegate();
return getMongoUserEntity(delegate);
} else {
return getMongoStore().loadEntity(MongoUserEntity.class, user.getId(), invocationContext);
}
}
@Override
public CredentialModel createCredential(RealmModel realm, UserModel user, CredentialModel cred) {
MongoUserEntity mongoUser = getMongoUserEntity(user);
CredentialEntity credentialEntity = new CredentialEntity();
credentialEntity.setId(KeycloakModelUtils.generateId());
setValues(cred, credentialEntity);
cred.setId(credentialEntity.getId());
mongoUser.getCredentials().add(credentialEntity);
getMongoStore().updateEntity(mongoUser, invocationContext);
return cred;
}
public void setValues(CredentialModel cred, CredentialEntity credentialEntity) {
credentialEntity.setType(cred.getType());
credentialEntity.setDevice(cred.getDevice());
credentialEntity.setValue(cred.getValue());
credentialEntity.setSalt(cred.getSalt());
credentialEntity.setHashIterations(cred.getHashIterations());
credentialEntity.setCounter(cred.getCounter());
credentialEntity.setAlgorithm(cred.getAlgorithm());
credentialEntity.setDigits(cred.getDigits());
credentialEntity.setPeriod(cred.getPeriod());
if (cred.getConfig() == null) {
credentialEntity.setConfig(null);
}
else {
if (credentialEntity.getConfig() == null) credentialEntity.setConfig(new MultivaluedHashMap<>());
credentialEntity.getConfig().clear();
credentialEntity.getConfig().putAll(cred.getConfig());
}
}
@Override
public boolean removeStoredCredential(RealmModel realm, UserModel user, String id) {
MongoUserEntity mongoUser = getMongoUserEntity(user);
Iterator<CredentialEntity> it = mongoUser.getCredentials().iterator();
while (it.hasNext()) {
CredentialEntity entity = it.next();
if (id.equals(entity.getId())) {
it.remove();
getMongoStore().updateEntity(mongoUser, invocationContext);
return true;
}
}
return false;
}
@Override
public CredentialModel getStoredCredentialById(RealmModel realm, UserModel user, String id) {
MongoUserEntity mongoUser = getMongoUserEntity(user);
for (CredentialEntity credEntity : mongoUser.getCredentials()) {
if(id.equals(credEntity.getId())) {
if (credEntity.getId() == null) {
credEntity.setId(KeycloakModelUtils.generateId());
getMongoStore().updateEntity(mongoUser, invocationContext);
}
return toModel(credEntity);
}
}
return null;
}
public CredentialModel toModel(CredentialEntity credEntity) {
CredentialModel credModel = new CredentialModel();
credModel.setId(credEntity.getId());
credModel.setType(credEntity.getType());
credModel.setDevice(credEntity.getDevice());
credModel.setCreatedDate(credEntity.getCreatedDate());
credModel.setValue(credEntity.getValue());
credModel.setSalt(credEntity.getSalt());
credModel.setHashIterations(credEntity.getHashIterations());
credModel.setAlgorithm(credEntity.getAlgorithm());
credModel.setCounter(credEntity.getCounter());
credModel.setPeriod(credEntity.getPeriod());
credModel.setDigits(credEntity.getDigits());
if (credEntity.getConfig() != null) {
credModel.setConfig(new MultivaluedHashMap<>());
credModel.getConfig().putAll(credEntity.getConfig());
}
return credModel;
}
@Override
public List<CredentialModel> getStoredCredentials(RealmModel realm, UserModel user) {
List<CredentialModel> list = new LinkedList<>();
MongoUserEntity mongoUser = getMongoUserEntity(user);
boolean update = false;
for (CredentialEntity credEntity : mongoUser.getCredentials()) {
if (credEntity.getId() == null) {
credEntity.setId(KeycloakModelUtils.generateId());
update = true;
}
CredentialModel credModel = toModel(credEntity);
list.add(credModel);
}
if (update) getMongoStore().updateEntity(mongoUser, invocationContext);
return list;
}
@Override
public List<CredentialModel> getStoredCredentialsByType(RealmModel realm, UserModel user, String type) {
List<CredentialModel> list = new LinkedList<>();
MongoUserEntity mongoUser = getMongoUserEntity(user);
boolean update = false;
for (CredentialEntity credEntity : mongoUser.getCredentials()) {
if (credEntity.getId() == null) {
credEntity.setId(KeycloakModelUtils.generateId());
update = true;
}
if (credEntity.getType().equals(type)) {
CredentialModel credModel = toModel(credEntity);
list.add(credModel);
}
}
if (update) getMongoStore().updateEntity(mongoUser, invocationContext);
return list;
}
@Override
public CredentialModel getStoredCredentialByNameAndType(RealmModel realm, UserModel user, String name, String type) {
MongoUserEntity mongoUser = getMongoUserEntity(user);
boolean update = false;
CredentialModel credModel = null;
for (CredentialEntity credEntity : mongoUser.getCredentials()) {
if (credEntity.getId() == null) {
credEntity.setId(KeycloakModelUtils.generateId());
update = true;
}
if (credEntity.getType().equals(type) && name.equals(credEntity.getDevice())) {
credModel = toModel(credEntity);
break;
}
}
if (update) getMongoStore().updateEntity(mongoUser, invocationContext);
return credModel;
}
// Could override this to provide a custom behavior.
protected void ensureEmailConstraint(List<MongoUserEntity> users, RealmModel realm) {
MongoUserEntity user = users.get(0);
if (users.size() > 1) {
// Realm settings have been changed from allowing duplicate emails to not allowing them
// but duplicates haven't been removed.
throw new ModelDuplicateException("Multiple users with email '" + user.getEmail() + "' exist in Keycloak.");
}
if (realm.isDuplicateEmailsAllowed()) {
return;
}
if (user.getEmail() != null && user.getEmailIndex() == null) {
// Realm settings have been changed from allowing duplicate emails to not allowing them.
// We need to update the email index to reflect this change in the user entities.
user.setEmail(user.getEmail(), false);
getMongoStore().updateEntity(user, invocationContext);
}
}
}
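For reference, below is a minimal standalone sketch of the query composition that searchForUser above performs with the legacy mongo-java-driver QueryBuilder. The realm id and search string are invented, and the sketch only prints the resulting query documents instead of loading entities through a MongoStore.

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;
import com.mongodb.QueryBuilder;

import java.util.regex.Pattern;

// Illustrative only: rebuilds the kind of query document searchForUser() composes,
// without touching a live MongoDB instance.
public class UserSearchQuerySketch {

    public static void main(String[] args) {
        String realmId = "demo";    // hypothetical realm id
        String search = "ohn Bow";  // triggers the "firstName ends with / lastName starts with" branch

        Pattern anyField = Pattern.compile("(?i:" + search + ")");

        int spaceInd = search.lastIndexOf(" ");
        DBObject nameQuery;
        if (spaceInd != -1) {
            Pattern firstName = Pattern.compile("(?i:" + search.substring(0, spaceInd) + "$)");
            Pattern lastName = Pattern.compile("(?i:^" + search.substring(spaceInd + 1) + ")");
            nameQuery = new QueryBuilder().and(
                    new QueryBuilder().put("firstName").regex(firstName).get(),
                    new QueryBuilder().put("lastName").regex(lastName).get()
            ).get();
        } else {
            nameQuery = new QueryBuilder().or(
                    new QueryBuilder().put("firstName").regex(anyField).get(),
                    new QueryBuilder().put("lastName").regex(anyField).get()
            ).get();
        }

        DBObject query = new QueryBuilder().and(
                new QueryBuilder().and("realmId").is(realmId).get(),
                new QueryBuilder().and("serviceAccountClientLink").is(null).get(),
                new QueryBuilder().or(
                        new QueryBuilder().put("username").regex(anyField).get(),
                        new QueryBuilder().put("email").regex(anyField).get(),
                        nameQuery
                ).get()
        ).get();

        DBObject sort = new BasicDBObject("username", 1);

        // Print the composed documents instead of running them through a MongoStore
        System.out.println("query: " + query);
        System.out.println("sort:  " + sort);
    }
}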

View file

@ -1,61 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.models.mongo.keycloak.adapters;
import org.jboss.logging.Logger;
import org.keycloak.Config;
import org.keycloak.connections.mongo.MongoConnectionProvider;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.KeycloakSessionFactory;
import org.keycloak.models.UserProvider;
import org.keycloak.models.UserProviderFactory;
/**
* KeycloakSessionFactory implementation based on MongoDB
*
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class MongoUserProviderFactory implements UserProviderFactory {
protected static final Logger logger = Logger.getLogger(MongoUserProviderFactory.class);
@Override
public String getId() {
return "mongo";
}
@Override
public void init(Config.Scope config) {
}
@Override
public void postInit(KeycloakSessionFactory factory) {
}
@Override
public UserProvider create(KeycloakSession session) {
MongoConnectionProvider connection = session.getProvider(MongoConnectionProvider.class);
return new MongoUserProvider(session, connection.getInvocationContext());
}
@Override
public void close() {
}
}
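A hypothetical, simplified illustration of the select-a-provider-factory-by-id pattern that the getId() value "mongo" above plugged into. None of these types are Keycloak classes; the "jpa" and "mongo" ids simply mirror the persistenceProvider values set in the migration scripts earlier in this commit.

import java.util.HashMap;
import java.util.Map;

public class ProviderSelectionSketch {

    interface Provider { String describe(); }

    interface ProviderFactory {
        String getId();
        Provider create();
    }

    public static void main(String[] args) {
        Map<String, ProviderFactory> factories = new HashMap<String, ProviderFactory>();
        register(factories, new ProviderFactory() {
            public String getId() { return "jpa"; }
            public Provider create() { return () -> "JPA-backed user provider"; }
        });
        register(factories, new ProviderFactory() {
            public String getId() { return "mongo"; }
            public Provider create() { return () -> "Mongo-backed user provider"; }
        });

        // The default-provider / persistenceProvider setting plays this role:
        String defaultProvider = "jpa";
        Provider provider = factories.get(defaultProvider).create();
        System.out.println(provider.describe());
    }

    static void register(Map<String, ProviderFactory> factories, ProviderFactory f) {
        factories.put(f.getId(), f);
    }
}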

View file

@ -1,335 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.models.mongo.keycloak.adapters;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;
import com.mongodb.QueryBuilder;
import org.keycloak.connections.mongo.api.MongoStore;
import org.keycloak.connections.mongo.api.context.MongoStoreInvocationContext;
import org.keycloak.models.ClientModel;
import org.keycloak.models.ClientSessionModel;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.ModelException;
import org.keycloak.models.RealmModel;
import org.keycloak.models.UserModel;
import org.keycloak.models.UserSessionModel;
import org.keycloak.models.mongo.keycloak.entities.MongoOfflineUserSessionEntity;
import org.keycloak.models.mongo.keycloak.entities.MongoOnlineUserSessionEntity;
import org.keycloak.models.mongo.keycloak.entities.MongoUserSessionEntity;
import org.keycloak.models.mongo.keycloak.entities.PersistentClientSessionEntity;
import org.keycloak.models.mongo.keycloak.entities.PersistentUserSessionEntity;
import org.keycloak.models.session.PersistentClientSessionAdapter;
import org.keycloak.models.session.PersistentClientSessionModel;
import org.keycloak.models.session.PersistentUserSessionAdapter;
import org.keycloak.models.session.PersistentUserSessionModel;
import org.keycloak.models.session.UserSessionPersisterProvider;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class MongoUserSessionPersisterProvider implements UserSessionPersisterProvider {
private final MongoStoreInvocationContext invocationContext;
private final KeycloakSession session;
public MongoUserSessionPersisterProvider(KeycloakSession session, MongoStoreInvocationContext invocationContext) {
this.session = session;
this.invocationContext = invocationContext;
}
protected MongoStore getMongoStore() {
return invocationContext.getMongoStore();
}
private MongoUserSessionEntity loadUserSession(String userSessionId, boolean offline) {
Class<? extends MongoUserSessionEntity> clazz = offline ? MongoOfflineUserSessionEntity.class : MongoOnlineUserSessionEntity.class;
return getMongoStore().loadEntity(clazz, userSessionId, invocationContext);
}
@Override
public void createUserSession(UserSessionModel userSession, boolean offline) {
PersistentUserSessionAdapter adapter = new PersistentUserSessionAdapter(userSession);
PersistentUserSessionModel model = adapter.getUpdatedModel();
MongoUserSessionEntity entity = offline ? new MongoOfflineUserSessionEntity() : new MongoOnlineUserSessionEntity();
entity.setId(model.getUserSessionId());
entity.setRealmId(adapter.getRealm().getId());
entity.setUserId(adapter.getUser().getId());
entity.setLastSessionRefresh(model.getLastSessionRefresh());
entity.setData(model.getData());
entity.setClientSessions(new ArrayList<PersistentClientSessionEntity>());
getMongoStore().insertEntity(entity, invocationContext);
}
@Override
public void createClientSession(ClientSessionModel clientSession, boolean offline) {
PersistentClientSessionAdapter adapter = new PersistentClientSessionAdapter(clientSession);
PersistentClientSessionModel model = adapter.getUpdatedModel();
MongoUserSessionEntity userSession = loadUserSession(model.getUserSessionId(), offline);
if (userSession == null) {
throw new ModelException("Not userSession found with ID " + clientSession.getUserSession().getId() + ". Requested by clientSession: " + clientSession.getId());
} else {
PersistentClientSessionEntity entity = new PersistentClientSessionEntity();
entity.setClientSessionId(clientSession.getId());
entity.setClientId(clientSession.getClient().getId());
entity.setData(model.getData());
userSession.getClientSessions().add(entity);
getMongoStore().updateEntity(userSession, invocationContext);
}
}
@Override
public void updateUserSession(UserSessionModel userSession, boolean offline) {
PersistentUserSessionAdapter adapter;
if (userSession instanceof PersistentUserSessionAdapter) {
adapter = (PersistentUserSessionAdapter) userSession;
} else {
adapter = new PersistentUserSessionAdapter(userSession);
}
PersistentUserSessionModel model = adapter.getUpdatedModel();
MongoUserSessionEntity entity = loadUserSession(model.getUserSessionId(), offline);
if (entity == null) {
throw new ModelException("UserSession with ID " + userSession.getId() + ", offline: " + offline + " not found");
}
entity.setLastSessionRefresh(model.getLastSessionRefresh());
entity.setData(model.getData());
getMongoStore().updateEntity(entity, invocationContext);
}
@Override
public void removeUserSession(String userSessionId, boolean offline) {
MongoUserSessionEntity entity = loadUserSession(userSessionId, offline);
if (entity != null) {
getMongoStore().removeEntity(entity, invocationContext);
}
}
@Override
public void removeClientSession(String clientSessionId, boolean offline) {
DBObject query = new QueryBuilder()
.and("clientSessions.clientSessionId").is(clientSessionId)
.get();
Class<? extends MongoUserSessionEntity> clazz = offline ? MongoOfflineUserSessionEntity.class : MongoOnlineUserSessionEntity.class;
MongoUserSessionEntity userSession = getMongoStore().loadSingleEntity(clazz, query, invocationContext);
if (userSession != null) {
PersistentClientSessionEntity found = null;
for (PersistentClientSessionEntity clientSession : userSession.getClientSessions()) {
if (clientSession.getClientSessionId().equals(clientSessionId)) {
found = clientSession;
break;
}
}
if (found != null) {
userSession.getClientSessions().remove(found);
// Remove userSession if it was the last clientSession attached
if (userSession.getClientSessions().size() == 0) {
getMongoStore().removeEntity(userSession, invocationContext);
} else {
getMongoStore().updateEntity(userSession, invocationContext);
}
}
}
}
@Override
public void onRealmRemoved(RealmModel realm) {
DBObject query = new QueryBuilder()
.and("realmId").is(realm.getId())
.get();
getMongoStore().removeEntities(MongoOnlineUserSessionEntity.class, query, false, invocationContext);
getMongoStore().removeEntities(MongoOfflineUserSessionEntity.class, query, false, invocationContext);
}
@Override
public void onClientRemoved(RealmModel realm, ClientModel client) {
DBObject query = new QueryBuilder()
.and("clientSessions.clientId").is(client.getId())
.get();
List<MongoOnlineUserSessionEntity> userSessions = getMongoStore().loadEntities(MongoOnlineUserSessionEntity.class, query, invocationContext);
for (MongoOnlineUserSessionEntity userSession : userSessions) {
removeClientSessionOfClient(userSession, client.getId());
}
List<MongoOfflineUserSessionEntity> userSessions2 = getMongoStore().loadEntities(MongoOfflineUserSessionEntity.class, query, invocationContext);
for (MongoOfflineUserSessionEntity userSession : userSessions2) {
removeClientSessionOfClient(userSession, client.getId());
}
}
private void removeClientSessionOfClient(MongoUserSessionEntity userSession, String clientId) {
PersistentClientSessionEntity found = null;
for (PersistentClientSessionEntity clientSession : userSession.getClientSessions()) {
if (clientSession.getClientId().equals(clientId)) {
found = clientSession;
break;
}
}
if (found != null) {
userSession.getClientSessions().remove(found);
// Remove userSession if it was the last clientSession attached
if (userSession.getClientSessions().size() == 0) {
getMongoStore().removeEntity(userSession, invocationContext);
} else {
getMongoStore().updateEntity(userSession, invocationContext);
}
}
}
@Override
public void onUserRemoved(RealmModel realm, UserModel user) {
onUserRemoved(realm, user.getId());
}
private void onUserRemoved(RealmModel realm, String userId) {
DBObject query = new QueryBuilder()
.and("userId").is(userId)
.get();
getMongoStore().removeEntities(MongoOnlineUserSessionEntity.class, query, false, invocationContext);
getMongoStore().removeEntities(MongoOfflineUserSessionEntity.class, query, false, invocationContext);
}
@Override
public void clearDetachedUserSessions() {
DBObject query = new QueryBuilder()
.and("clientSessions").is(Collections.emptyList())
.get();
getMongoStore().removeEntities(MongoOnlineUserSessionEntity.class, query, false, invocationContext);
getMongoStore().removeEntities(MongoOfflineUserSessionEntity.class, query, false, invocationContext);
}
@Override
public int getUserSessionsCount(boolean offline) {
DBObject query = new QueryBuilder()
.get();
Class<? extends MongoUserSessionEntity> clazz = offline ? MongoOfflineUserSessionEntity.class : MongoOnlineUserSessionEntity.class;
return getMongoStore().countEntities(clazz, query, invocationContext);
}
@Override
public void updateAllTimestamps(int time) {
// 1) Update timestamp of clientSessions
DBObject timestampSubquery = new QueryBuilder()
.and("timestamp").notEquals(time).get();
DBObject query = new QueryBuilder()
.and("clientSessions").elemMatch(timestampSubquery).get();
DBObject update = new QueryBuilder()
.and("$set").is(new BasicDBObject("clientSessions.$.timestamp", time)).get();
// Not sure how to do this in a single query :/
int countModified = 1;
while (countModified > 0) {
countModified = getMongoStore().updateEntities(MongoOfflineUserSessionEntity.class, query, update, invocationContext);
}
countModified = 1;
while (countModified > 0) {
countModified = getMongoStore().updateEntities(MongoOnlineUserSessionEntity.class, query, update, invocationContext);
}
// 2) update lastSessionRefresh of userSessions
query = new QueryBuilder().get();
update = new QueryBuilder()
.and("$set").is(new BasicDBObject("lastSessionRefresh", time)).get();
getMongoStore().updateEntities(MongoOfflineUserSessionEntity.class, query, update, invocationContext);
getMongoStore().updateEntities(MongoOnlineUserSessionEntity.class, query, update, invocationContext);
}
@Override
public List<UserSessionModel> loadUserSessions(int firstResult, int maxResults, boolean offline) {
DBObject query = new QueryBuilder()
.get();
DBObject sort = new BasicDBObject("id", 1);
Class<? extends MongoUserSessionEntity> clazz = offline ? MongoOfflineUserSessionEntity.class : MongoOnlineUserSessionEntity.class;
List<? extends MongoUserSessionEntity> entities = getMongoStore().loadEntities(clazz, query, sort, firstResult, maxResults, invocationContext);
List<UserSessionModel> results = new LinkedList<>();
for (MongoUserSessionEntity entity : entities) {
RealmModel realm = session.realms().getRealm(entity.getRealmId());
UserModel user = session.users().getUserById(entity.getUserId(), realm);
// Case when user was deleted in the meantime
if (user == null) {
onUserRemoved(realm, entity.getUserId());
return loadUserSessions(firstResult, maxResults, offline);
}
PersistentUserSessionAdapter userSession = toAdapter(realm, user, entity);
results.add(userSession);
}
return results;
}
private PersistentUserSessionAdapter toAdapter(RealmModel realm, UserModel user, PersistentUserSessionEntity entity) {
PersistentUserSessionModel model = new PersistentUserSessionModel();
model.setUserSessionId(entity.getId());
model.setLastSessionRefresh(entity.getLastSessionRefresh());
model.setData(entity.getData());
List<ClientSessionModel> clientSessions = new LinkedList<>();
PersistentUserSessionAdapter userSessionAdapter = new PersistentUserSessionAdapter(model, realm, user, clientSessions);
for (PersistentClientSessionEntity clientSessEntity : entity.getClientSessions()) {
PersistentClientSessionAdapter clientSessAdapter = toAdapter(realm, userSessionAdapter, clientSessEntity);
clientSessions.add(clientSessAdapter);
}
return userSessionAdapter;
}
private PersistentClientSessionAdapter toAdapter(RealmModel realm, PersistentUserSessionAdapter userSession, PersistentClientSessionEntity entity) {
ClientModel client = realm.getClientById(entity.getClientId());
PersistentClientSessionModel model = new PersistentClientSessionModel();
model.setClientSessionId(entity.getClientSessionId());
model.setClientId(entity.getClientId());
model.setUserSessionId(userSession.getId());
model.setUserId(userSession.getUser().getId());
model.setTimestamp(entity.getTimestamp());
model.setData(entity.getData());
return new PersistentClientSessionAdapter(model, realm, client, userSession);
}
@Override
public void close() {
}
}
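A standalone sketch, for illustration only, of the update documents that updateAllTimestamps above builds. The positional "$" operator rewrites only the first clientSessions element matching the elemMatch condition in each document, which is why the provider re-runs the update in a loop until no documents are modified. The timestamp value is arbitrary.

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;
import com.mongodb.QueryBuilder;

public class TimestampUpdateSketch {

    public static void main(String[] args) {
        int time = 1486400000; // hypothetical new timestamp

        // Match documents that still have at least one clientSession with an old timestamp
        DBObject timestampSubquery = new QueryBuilder().and("timestamp").notEquals(time).get();
        DBObject query = new QueryBuilder().and("clientSessions").elemMatch(timestampSubquery).get();

        // "$" updates the first matching array element of each matched document
        DBObject update = new QueryBuilder()
                .and("$set").is(new BasicDBObject("clientSessions.$.timestamp", time)).get();

        System.out.println("query:  " + query);
        System.out.println("update: " + update);
    }
}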

View file

@ -1,54 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.models.mongo.keycloak.adapters;
import org.keycloak.Config;
import org.keycloak.connections.mongo.MongoConnectionProvider;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.KeycloakSessionFactory;
import org.keycloak.models.session.UserSessionPersisterProvider;
import org.keycloak.models.session.UserSessionPersisterProviderFactory;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class MongoUserSessionPersisterProviderFactory implements UserSessionPersisterProviderFactory {
public static final String ID = "mongo";
@Override
public UserSessionPersisterProvider create(KeycloakSession session) {
MongoConnectionProvider connection = session.getProvider(MongoConnectionProvider.class);
return new MongoUserSessionPersisterProvider(session, connection.getInvocationContext());
}
@Override
public void init(Config.Scope config) {
}
@Override
public void close() {
}
@Override
public String getId() {
return ID;
}
}

View file

@ -1,201 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.models.mongo.keycloak.adapters;
import com.mongodb.DBObject;
import com.mongodb.QueryBuilder;
import org.keycloak.connections.mongo.api.context.MongoStoreInvocationContext;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.RealmModel;
import org.keycloak.models.RoleContainerModel;
import org.keycloak.models.RoleModel;
import org.keycloak.models.mongo.keycloak.entities.MongoClientEntity;
import org.keycloak.models.mongo.keycloak.entities.MongoRealmEntity;
import org.keycloak.models.mongo.keycloak.entities.MongoRoleEntity;
import org.keycloak.models.utils.KeycloakModelUtils;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* Wrapper around the underlying MongoRoleEntity, which persists the wrapped object after each set operation (compatibility with the picketlink-based IDM)
*
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class RoleAdapter extends AbstractMongoAdapter<MongoRoleEntity> implements RoleModel {
private final MongoRoleEntity role;
private RoleContainerModel roleContainer;
private RealmModel realm;
private KeycloakSession session;
public RoleAdapter(KeycloakSession session, RealmModel realm, MongoRoleEntity roleEntity, MongoStoreInvocationContext invContext) {
this(session, realm, roleEntity, null, invContext);
}
public RoleAdapter(KeycloakSession session, RealmModel realm, MongoRoleEntity roleEntity, RoleContainerModel roleContainer, MongoStoreInvocationContext invContext) {
super(invContext);
this.role = roleEntity;
this.roleContainer = roleContainer;
this.realm = realm;
this.session = session;
}
@Override
public String getId() {
return role.getId();
}
@Override
public String getName() {
return role.getName();
}
@Override
public void setName(String name) {
role.setName(name);
updateRole();
}
@Override
public String getDescription() {
return role.getDescription();
}
@Override
public void setDescription(String description) {
role.setDescription(description);
updateRole();
}
@Override
public boolean isScopeParamRequired() {
return role.isScopeParamRequired();
}
@Override
public void setScopeParamRequired(boolean scopeParamRequired) {
role.setScopeParamRequired(scopeParamRequired);
updateRole();
}
@Override
public boolean isComposite() {
return role.getCompositeRoleIds() != null && role.getCompositeRoleIds().size() > 0;
}
protected void updateRole() {
super.updateMongoEntity();
}
@Override
public void addCompositeRole(RoleModel childRole) {
getMongoStore().pushItemToList(role, "compositeRoleIds", childRole.getId(), true, invocationContext);
}
@Override
public void removeCompositeRole(RoleModel childRole) {
getMongoStore().pullItemFromList(role, "compositeRoleIds", childRole.getId(), invocationContext);
}
@Override
public Set<RoleModel> getComposites() {
if (role.getCompositeRoleIds() == null || role.getCompositeRoleIds().isEmpty()) {
return Collections.EMPTY_SET;
}
DBObject query = new QueryBuilder()
.and("_id").in(role.getCompositeRoleIds())
.get();
List<MongoRoleEntity> childRoles = getMongoStore().loadEntities(MongoRoleEntity.class, query, invocationContext);
Set<RoleModel> set = new HashSet<RoleModel>();
for (MongoRoleEntity childRole : childRoles) {
set.add(new RoleAdapter(session, realm, childRole, invocationContext));
}
return set;
}
@Override
public boolean isClientRole() {
return role.getClientId() != null;
}
@Override
public String getContainerId() {
if (isClientRole()) return role.getClientId();
else return role.getRealmId();
}
@Override
public RoleContainerModel getContainer() {
if (roleContainer == null) {
// Compute it
if (role.getRealmId() != null) {
MongoRealmEntity realm = getMongoStore().loadEntity(MongoRealmEntity.class, role.getRealmId(), invocationContext);
if (realm == null) {
throw new IllegalStateException("Realm with id: " + role.getRealmId() + " doesn't exists");
}
roleContainer = new RealmAdapter(session, realm, invocationContext);
} else if (role.getClientId() != null) {
MongoClientEntity appEntity = getMongoStore().loadEntity(MongoClientEntity.class, role.getClientId(), invocationContext);
if (appEntity == null) {
throw new IllegalStateException("Application with id: " + role.getClientId() + " doesn't exists");
}
roleContainer = new ClientAdapter(session, realm, appEntity, invocationContext);
} else {
throw new IllegalStateException("Both realmId and clientId are null for role: " + this);
}
}
return roleContainer;
}
@Override
public boolean hasRole(RoleModel role) {
return this.equals(role) || KeycloakModelUtils.searchFor(role, this, new HashSet<>());
}
public MongoRoleEntity getRole() {
return role;
}
@Override
public MongoRoleEntity getMongoEntity() {
return role;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || !(o instanceof RoleModel)) return false;
RoleModel that = (RoleModel) o;
return that.getId().equals(getId());
}
@Override
public int hashCode() {
return getId().hashCode();
}
}
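A hypothetical sketch of the composite-role containment check that RoleAdapter.hasRole above relies on: walk compositeRoleIds transitively while tracking visited roles, so a cycle in the role graph cannot recurse forever. Plain Java maps stand in for the Mongo-backed role entities; the role ids are invented.

import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class CompositeRoleSketch {

    public static void main(String[] args) {
        // roleId -> ids of its composite (child) roles
        Map<String, List<String>> composites = new HashMap<String, List<String>>();
        composites.put("admin", Arrays.asList("manage-users", "view-realm"));
        composites.put("manage-users", Arrays.asList("query-users"));

        System.out.println(hasRole(composites, "admin", "query-users")); // true
        System.out.println(hasRole(composites, "view-realm", "admin"));  // false
    }

    static boolean hasRole(Map<String, List<String>> composites, String roleId, String targetId) {
        return roleId.equals(targetId) || searchFor(composites, roleId, targetId, new HashSet<String>());
    }

    static boolean searchFor(Map<String, List<String>> composites, String roleId,
                             String targetId, Set<String> visited) {
        if (!visited.add(roleId)) return false; // already explored; guards against role cycles
        List<String> children = composites.get(roleId);
        if (children == null) children = Collections.emptyList();
        for (String childId : children) {
            if (childId.equals(targetId)) return true;
            if (searchFor(composites, childId, targetId, visited)) return true;
        }
        return false;
    }
}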

View file

@ -1,357 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.models.mongo.keycloak.adapters;
import org.keycloak.connections.mongo.api.context.MongoStoreInvocationContext;
import org.keycloak.models.ClientModel;
import org.keycloak.models.GroupModel;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.RealmModel;
import org.keycloak.models.RoleModel;
import org.keycloak.models.UserModel;
import org.keycloak.models.mongo.keycloak.entities.MongoUserEntity;
import org.keycloak.models.mongo.utils.MongoModelUtils;
import org.keycloak.models.utils.KeycloakModelUtils;
import org.keycloak.models.utils.RoleUtils;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* Wrapper around the underlying MongoUserEntity, which persists the wrapped object after each set operation (compatibility with the picketlink-based IDM)
*
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class UserAdapter extends AbstractMongoAdapter<MongoUserEntity> implements UserModel {
private final MongoUserEntity user;
private final RealmModel realm;
private final KeycloakSession session;
public UserAdapter(KeycloakSession session, RealmModel realm, MongoUserEntity userEntity, MongoStoreInvocationContext invContext) {
super(invContext);
this.user = userEntity;
this.realm = realm;
this.session = session;
}
@Override
public String getId() {
return user.getId();
}
@Override
public String getUsername() {
return user.getUsername();
}
@Override
public void setUsername(String username) {
username = KeycloakModelUtils.toLowerCaseSafe(username);
user.setUsername(username);
updateUser();
}
@Override
public Long getCreatedTimestamp() {
return user.getCreatedTimestamp();
}
@Override
public void setCreatedTimestamp(Long timestamp) {
user.setCreatedTimestamp(timestamp);
}
@Override
public boolean isEnabled() {
return user.isEnabled();
}
@Override
public void setEnabled(boolean enabled) {
user.setEnabled(enabled);
updateUser();
}
@Override
public String getFirstName() {
return user.getFirstName();
}
@Override
public void setFirstName(String firstName) {
user.setFirstName(firstName);
updateUser();
}
@Override
public String getLastName() {
return user.getLastName();
}
@Override
public void setLastName(String lastName) {
user.setLastName(lastName);
updateUser();
}
@Override
public String getEmail() {
return user.getEmail();
}
@Override
public void setEmail(String email) {
email = KeycloakModelUtils.toLowerCaseSafe(email);
user.setEmail(email, realm.isDuplicateEmailsAllowed());
updateUser();
}
@Override
public boolean isEmailVerified() {
return user.isEmailVerified();
}
@Override
public void setEmailVerified(boolean verified) {
user.setEmailVerified(verified);
updateUser();
}
@Override
public void setSingleAttribute(String name, String value) {
if (user.getAttributes() == null) {
user.setAttributes(new HashMap<String, List<String>>());
}
List<String> attrValues = new ArrayList<>();
attrValues.add(value);
user.getAttributes().put(name, attrValues);
updateUser();
}
@Override
public void setAttribute(String name, List<String> values) {
if (user.getAttributes() == null) {
user.setAttributes(new HashMap<String, List<String>>());
}
user.getAttributes().put(name, values);
updateUser();
}
@Override
public void removeAttribute(String name) {
if (user.getAttributes() == null) return;
user.getAttributes().remove(name);
updateUser();
}
@Override
public String getFirstAttribute(String name) {
if (user.getAttributes()==null) return null;
List<String> attrValues = user.getAttributes().get(name);
return (attrValues==null || attrValues.isEmpty()) ? null : attrValues.get(0);
}
@Override
public List<String> getAttribute(String name) {
if (user.getAttributes()==null) return Collections.<String>emptyList();
List<String> attrValues = user.getAttributes().get(name);
return (attrValues == null) ? Collections.<String>emptyList() : Collections.unmodifiableList(attrValues);
}
@Override
public Map<String, List<String>> getAttributes() {
return user.getAttributes()==null ? Collections.<String, List<String>>emptyMap() : Collections.unmodifiableMap((Map) user.getAttributes());
}
public MongoUserEntity getUser() {
return user;
}
@Override
public Set<String> getRequiredActions() {
Set<String> result = new HashSet<String>();
if (user.getRequiredActions() != null) {
result.addAll(user.getRequiredActions());
}
return result;
}
@Override
public void addRequiredAction(RequiredAction action) {
String actionName = action.name();
addRequiredAction(actionName);
}
@Override
public void addRequiredAction(String actionName) {
getMongoStore().pushItemToList(user, "requiredActions", actionName, true, invocationContext);
}
@Override
public void removeRequiredAction(RequiredAction action) {
String actionName = action.name();
removeRequiredAction(actionName);
}
@Override
public void removeRequiredAction(String actionName) {
getMongoStore().pullItemFromList(user, "requiredActions", actionName, invocationContext);
}
protected void updateUser() {
super.updateMongoEntity();
}
@Override
public MongoUserEntity getMongoEntity() {
return user;
}
@Override
public Set<GroupModel> getGroups() {
if (user.getGroupIds() == null || user.getGroupIds().size() == 0) return Collections.EMPTY_SET;
Set<GroupModel> groups = new HashSet<>();
for (String id : user.getGroupIds()) {
groups.add(realm.getGroupById(id));
}
return groups;
}
@Override
public void joinGroup(GroupModel group) {
getMongoStore().pushItemToList(getUser(), "groupIds", group.getId(), true, invocationContext);
}
@Override
public void leaveGroup(GroupModel group) {
if (user == null || group == null) return;
getMongoStore().pullItemFromList(getUser(), "groupIds", group.getId(), invocationContext);
}
@Override
public boolean isMemberOf(GroupModel group) {
if (user.getGroupIds() == null) return false;
if (user.getGroupIds().contains(group.getId())) return true;
Set<GroupModel> groups = getGroups();
return RoleUtils.isMember(groups, group);
}
@Override
public boolean hasRole(RoleModel role) {
Set<RoleModel> roles = getRoleMappings();
return RoleUtils.hasRole(roles, role)
|| RoleUtils.hasRoleFromGroup(getGroups(), role, true);
}
@Override
public void grantRole(RoleModel role) {
getMongoStore().pushItemToList(getUser(), "roleIds", role.getId(), true, invocationContext);
}
@Override
public Set<RoleModel> getRoleMappings() {
List<RoleModel> roles = MongoModelUtils.getAllRolesOfUser(realm, this);
return new HashSet<RoleModel>(roles);
}
@Override
public Set<RoleModel> getRealmRoleMappings() {
Set<RoleModel> allRoles = getRoleMappings();
// Filter to retrieve just realm roles
Set<RoleModel> realmRoles = new HashSet<RoleModel>();
for (RoleModel role : allRoles) {
if (role.getContainer() instanceof RealmModel) {
realmRoles.add(role);
}
}
return realmRoles;
}
@Override
public void deleteRoleMapping(RoleModel role) {
if (user == null || role == null) return;
getMongoStore().pullItemFromList(getUser(), "roleIds", role.getId(), invocationContext);
}
@Override
public Set<RoleModel> getClientRoleMappings(ClientModel app) {
Set<RoleModel> result = new HashSet<RoleModel>();
List<RoleModel> roles = MongoModelUtils.getAllRolesOfUser(realm, this);
for (RoleModel role : roles) {
if (app.equals(role.getContainer())) {
result.add(role);
}
}
return result;
}
@Override
public String getFederationLink() {
return user.getFederationLink();
}
@Override
public void setFederationLink(String link) {
user.setFederationLink(link);
updateUser();
}
@Override
public String getServiceAccountClientLink() {
return user.getServiceAccountClientLink();
}
@Override
public void setServiceAccountClientLink(String clientInternalId) {
user.setServiceAccountClientLink(clientInternalId);
updateUser();
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || !(o instanceof UserModel)) return false;
UserModel that = (UserModel) o;
return that.getId().equals(getId());
}
@Override
public int hashCode() {
return getId().hashCode();
}
}
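The adapter above exists only to back Keycloak's storage-agnostic UserModel contract, which is why removing it leaves callers untouched. As a rough illustration, a hypothetical helper written purely against UserModel (not part of this commit; names invented) works the same no matter which persistence provider is active:

import org.keycloak.models.GroupModel;
import org.keycloak.models.RoleModel;
import org.keycloak.models.UserModel;

// Hypothetical helper, shown only to illustrate the UserModel API that the
// removed adapter implemented; it is not part of this commit.
public class UserProvisioningSketch {

    public static void provision(UserModel user, RoleModel role, GroupModel group) {
        if (!user.hasRole(role)) {
            user.grantRole(role);      // was backed by the "roleIds" list in MongoUserEntity
        }
        if (!user.isMemberOf(group)) {
            user.joinGroup(group);     // was backed by the "groupIds" list in MongoUserEntity
        }
        // Force a password update on the next login.
        user.addRequiredAction(UserModel.RequiredAction.UPDATE_PASSWORD);
    }
}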


@ -1,62 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.models.mongo.keycloak.entities;
/**
* Base for the identifiable entity
*
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class AbstractIdentifiableEntity {
protected String id;
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
@Override
public boolean equals(Object o) {
if (o == this) return true;
if (this.id == null) return false;
if (o == null || getClass() != o.getClass()) return false;
AbstractIdentifiableEntity that = (AbstractIdentifiableEntity) o;
if (!getId().equals(that.getId())) return false;
return true;
}
@Override
public int hashCode() {
return id!=null ? id.hashCode() : super.hashCode();
}
@Override
public String toString() {
return String.format("%s [ id=%s ]", getClass().getSimpleName(), getId());
}
}
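Equality for these entities is purely id-based, falling back to identity hashCode when no id has been assigned. A minimal sketch of that contract, using a hypothetical subclass purely for illustration:

import org.keycloak.models.mongo.keycloak.entities.AbstractIdentifiableEntity;

// Hypothetical subclass, used only to exercise the id-based equals/hashCode above.
public class IdentityContractSketch {

    static class ExampleEntity extends AbstractIdentifiableEntity {
    }

    public static void main(String[] args) {
        ExampleEntity a = new ExampleEntity();
        ExampleEntity b = new ExampleEntity();
        a.setId("42");
        b.setId("42");
        // Same concrete class and same id => equal, with matching hash codes.
        System.out.println(a.equals(b) && a.hashCode() == b.hashCode()); // true
        // An entity without an id is never equal to another instance.
        System.out.println(new ExampleEntity().equals(a));               // false
    }
}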


@ -1,100 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.models.mongo.keycloak.entities;
import org.keycloak.models.AuthenticationExecutionModel;
/**
* @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
* @version $Revision: 1 $
*/
public class AuthenticationExecutionEntity extends AbstractIdentifiableEntity {
protected String authenticator;
protected String authenticatorConfig;
protected String flowId;
protected AuthenticationExecutionModel.Requirement requirement;
protected int priority;
protected boolean userSetupAllowed;
protected boolean authenticatorFlow;
protected String parentFlow;
public String getAuthenticator() {
return authenticator;
}
public void setAuthenticator(String authenticator) {
this.authenticator = authenticator;
}
public AuthenticationExecutionModel.Requirement getRequirement() {
return requirement;
}
public void setRequirement(AuthenticationExecutionModel.Requirement requirement) {
this.requirement = requirement;
}
public int getPriority() {
return priority;
}
public void setPriority(int priority) {
this.priority = priority;
}
public boolean isUserSetupAllowed() {
return userSetupAllowed;
}
public void setUserSetupAllowed(boolean userSetupAllowed) {
this.userSetupAllowed = userSetupAllowed;
}
public boolean isAuthenticatorFlow() {
return authenticatorFlow;
}
public void setAuthenticatorFlow(boolean authenticatorFlow) {
this.authenticatorFlow = authenticatorFlow;
}
public String getParentFlow() {
return parentFlow;
}
public void setParentFlow(String parentFlow) {
this.parentFlow = parentFlow;
}
public String getFlowId() {
return flowId;
}
public void setFlowId(String flowId) {
this.flowId = flowId;
}
public String getAuthenticatorConfig() {
return authenticatorConfig;
}
public void setAuthenticatorConfig(String authenticatorConfig) {
this.authenticatorConfig = authenticatorConfig;
}
}


@ -1,82 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.models.mongo.keycloak.entities;
import java.util.ArrayList;
import java.util.List;
/**
* @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
* @version $Revision: 1 $
*/
public class AuthenticationFlowEntity extends AbstractIdentifiableEntity {
protected String alias;
protected String description;
protected String providerId;
protected boolean topLevel;
protected boolean builtIn;
List<AuthenticationExecutionEntity> executions = new ArrayList<AuthenticationExecutionEntity>();
public String getAlias() {
return alias;
}
public void setAlias(String alias) {
this.alias = alias;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public List<AuthenticationExecutionEntity> getExecutions() {
return executions;
}
public void setExecutions(List<AuthenticationExecutionEntity> executions) {
this.executions = executions;
}
public String getProviderId() {
return providerId;
}
public void setProviderId(String providerId) {
this.providerId = providerId;
}
public boolean isTopLevel() {
return topLevel;
}
public void setTopLevel(boolean topLevel) {
this.topLevel = topLevel;
}
public boolean isBuiltIn() {
return builtIn;
}
public void setBuiltIn(boolean builtIn) {
this.builtIn = builtIn;
}
}


@ -1,45 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.models.mongo.keycloak.entities;
import java.util.Map;
/**
* @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
* @version $Revision: 1 $
*/
public class AuthenticatorConfigEntity extends AbstractIdentifiableEntity{
protected String alias;
protected Map<String, String> config;
public String getAlias() {
return alias;
}
public void setAlias(String alias) {
this.alias = alias;
}
public Map<String, String> getConfig() {
return config;
}
public void setConfig(Map<String, String> config) {
this.config = config;
}
}
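Taken together, the three entities above are plain value objects wired by id: a flow holds a list of executions (ordered by priority), and each execution names an authenticator and, optionally, a config entity via its id. A rough wiring sketch (aliases, ids and the authenticator name are invented for illustration):

import java.util.Collections;

import org.keycloak.models.AuthenticationExecutionModel;
import org.keycloak.models.mongo.keycloak.entities.AuthenticationExecutionEntity;
import org.keycloak.models.mongo.keycloak.entities.AuthenticationFlowEntity;
import org.keycloak.models.mongo.keycloak.entities.AuthenticatorConfigEntity;

// Illustration only: shows how the removed entities referenced each other by id.
public class FlowWiringSketch {

    public static AuthenticationFlowEntity browserFlowExample() {
        AuthenticatorConfigEntity config = new AuthenticatorConfigEntity();
        config.setId("cfg-1");                              // ids would normally be generated
        config.setAlias("example-config");
        config.setConfig(Collections.singletonMap("key", "value"));

        AuthenticationExecutionEntity execution = new AuthenticationExecutionEntity();
        execution.setAuthenticator("auth-cookie");          // example authenticator id
        execution.setAuthenticatorConfig(config.getId());   // linked by id, not by reference
        execution.setRequirement(AuthenticationExecutionModel.Requirement.REQUIRED);
        execution.setPriority(10);

        AuthenticationFlowEntity flow = new AuthenticationFlowEntity();
        flow.setId("flow-1");
        flow.setAlias("browser");
        flow.setTopLevel(true);
        flow.setExecutions(Collections.singletonList(execution));
        return flow;
    }
}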

Some files were not shown because too many files have changed in this diff.