KEYCLOAK-14162 Uplifted Apache DS version for LDAPEmbeddedServer

Signed-off-by: Tero Saarni <tero.saarni@est.tech>
This commit is contained in:
Tero Saarni 2020-05-13 15:58:20 +03:00 committed by Marek Posolda
parent bae802bcfa
commit bf8316eefa
7 changed files with 70 additions and 579 deletions

14
pom.xml
View file

@@ -109,8 +109,8 @@
<commons-lang.version>2.6</commons-lang.version> <commons-lang.version>2.6</commons-lang.version>
<commons-lang3.version>3.9</commons-lang3.version> <commons-lang3.version>3.9</commons-lang3.version>
<commons-io.version>2.6</commons-io.version> <commons-io.version>2.6</commons-io.version>
<apacheds.version>2.0.0-M24</apacheds.version> <apacheds.version>2.0.0.AM26</apacheds.version>
<apacheds.codec.version>1.0.0</apacheds.codec.version> <apacheds.codec.version>2.0.0</apacheds.codec.version>
<google.zxing.version>3.4.0</google.zxing.version> <google.zxing.version>3.4.0</google.zxing.version>
<freemarker.version>2.3.29</freemarker.version> <freemarker.version>2.3.29</freemarker.version>
@@ -579,6 +579,16 @@
</dependency> </dependency>
<!-- Apache DS --> <!-- Apache DS -->
<dependency>
<groupId>org.apache.directory.server</groupId>
<artifactId>apacheds-core</artifactId>
<version>${apacheds.version}</version>
</dependency>
<dependency>
<groupId>org.apache.directory.server</groupId>
<artifactId>apacheds-core-api</artifactId>
<version>${apacheds.version}</version>
</dependency>
<dependency> <dependency>
<groupId>org.apache.directory.server</groupId> <groupId>org.apache.directory.server</groupId>
<artifactId>apacheds-core-annotations</artifactId> <artifactId>apacheds-core-annotations</artifactId>

View file

@@ -39,6 +39,14 @@
<groupId>commons-io</groupId> <groupId>commons-io</groupId>
<artifactId>commons-io</artifactId> <artifactId>commons-io</artifactId>
</dependency> </dependency>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</dependency>
<dependency> <dependency>
<groupId>org.jboss.logging</groupId> <groupId>org.jboss.logging</groupId>
<artifactId>jboss-logging</artifactId> <artifactId>jboss-logging</artifactId>
@@ -82,6 +90,14 @@
</exclusion> </exclusion>
</exclusions> </exclusions>
</dependency> </dependency>
<dependency>
<groupId>org.apache.directory.server</groupId>
<artifactId>apacheds-core</artifactId>
</dependency>
<dependency>
<groupId>org.apache.directory.server</groupId>
<artifactId>apacheds-core-api</artifactId>
</dependency>
<dependency> <dependency>
<groupId>org.apache.directory.api</groupId> <groupId>org.apache.directory.api</groupId>
<artifactId>api-ldap-codec-standalone</artifactId> <artifactId>api-ldap-codec-standalone</artifactId>

View file

@@ -1,278 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.util.ldap;
import org.apache.directory.api.ldap.model.constants.SchemaConstants;
import org.apache.directory.api.ldap.model.schema.LdapComparator;
import org.apache.directory.api.ldap.model.schema.SchemaManager;
import org.apache.directory.api.ldap.model.schema.comparators.NormalizingComparator;
import org.apache.directory.api.ldap.model.schema.registries.ComparatorRegistry;
import org.apache.directory.api.ldap.model.schema.registries.SchemaLoader;
import org.apache.directory.api.ldap.schema.extractor.SchemaLdifExtractor;
import org.apache.directory.api.ldap.schema.extractor.impl.DefaultSchemaLdifExtractor;
import org.apache.directory.api.ldap.schema.loader.LdifSchemaLoader;
import org.apache.directory.api.ldap.schema.manager.impl.DefaultSchemaManager;
import org.apache.directory.api.util.exception.Exceptions;
import org.apache.directory.server.constants.ServerDNConstants;
import org.apache.directory.server.core.DefaultDirectoryService;
import org.apache.directory.server.core.api.CacheService;
import org.apache.directory.server.core.api.DirectoryService;
import org.apache.directory.server.core.api.InstanceLayout;
import org.apache.directory.server.core.api.partition.Partition;
import org.apache.directory.server.core.api.schema.SchemaPartition;
import org.apache.directory.server.core.factory.DefaultDirectoryServiceFactory;
import org.apache.directory.server.core.factory.DirectoryServiceFactory;
import org.apache.directory.server.core.factory.LdifPartitionFactory;
import org.apache.directory.server.core.factory.PartitionFactory;
import org.apache.directory.server.core.partition.ldif.LdifPartition;
import org.apache.directory.server.i18n.I18n;
import org.jboss.logging.Logger;
import java.io.File;
import java.io.IOException;
import java.util.List;
/**
* Slightly modified version of {@link DefaultDirectoryServiceFactory} which allows persistence among restarts and uses LDIF partitions by default
*
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
class FileDirectoryServiceFactory implements DirectoryServiceFactory {

    /** A logger for this class */
    private static final Logger LOG = Logger.getLogger(FileDirectoryServiceFactory.class);

    /** The directory service. */
    private DirectoryService directoryService;

    /** The partition factory. */
    private PartitionFactory partitionFactory;

    public FileDirectoryServiceFactory()
    {
        try
        {
            // creating the instance here so that
            // we can set some properties like accesscontrol, anon access
            // before starting up the service
            directoryService = new DefaultDirectoryService();
            // no need to register a shutdown hook during tests because this
            // starts a lot of threads and slows down test execution
            directoryService.setShutdownHookEnabled( false );
        }
        catch ( Exception e )
        {
            throw new RuntimeException( e );
        }
        try
        {
            // Optional override of the partition implementation via system property;
            // when unset, defaults to LDIF-file-backed partitions (persistent on disk).
            String typeName = System.getProperty( "apacheds.partition.factory" );
            if ( typeName != null )
            {
                Class<? extends PartitionFactory> type = ( Class<? extends PartitionFactory> ) Class.forName( typeName );
                // NOTE(review): Class.newInstance() is deprecated since Java 9;
                // consider type.getDeclaredConstructor().newInstance() when touching this code.
                partitionFactory = type.newInstance();
            }
            else
            {
                // partitionFactory = new JdbmPartitionFactory();
                partitionFactory = new LdifPartitionFactory();
            }
        }
        catch ( Exception e )
        {
            // NOTE(review): "partiton" typo is in a runtime log string — left untouched here.
            LOG.error( "Error instantiating custom partiton factory", e );
            throw new RuntimeException( e );
        }
    }

    public FileDirectoryServiceFactory( DirectoryService directoryService, PartitionFactory partitionFactory )
    {
        this.directoryService = directoryService;
        this.partitionFactory = partitionFactory;
    }

    /**
     * {@inheritDoc}
     */
    public void init( String name ) throws Exception
    {
        // Idempotent: calling init() again on an already-started service is a no-op.
        if ( ( directoryService != null ) && directoryService.isStarted() ) {
            return;
        }
        build(name);
    }

    /**
     * Build the working directory
     */
    private void buildInstanceDirectory( String name ) throws IOException
    {
        // An explicit "workingDirectory" system property wins; otherwise use a
        // per-instance directory under java.io.tmpdir.
        String instanceDirectory = System.getProperty( "workingDirectory" );
        if ( instanceDirectory == null )
        {
            instanceDirectory = System.getProperty( "java.io.tmpdir" ) + "/server-work-" + name;
        }
        InstanceLayout instanceLayout = new InstanceLayout( instanceDirectory );
        // Deliberately NOT deleting an existing instance directory: this factory is
        // meant to persist data across restarts (see class javadoc), hence the
        // deletion block stays commented out.
        /*if ( instanceLayout.getInstanceDirectory().exists() )
        {
            try
            {
                FileUtils.deleteDirectory(instanceLayout.getInstanceDirectory());
            }
            catch ( IOException e )
            {
                LOG.warn( "couldn't delete the instance directory before initializing the DirectoryService", e );
            }
        }*/
        directoryService.setInstanceLayout( instanceLayout );
    }

    /**
     * Inits the schema and schema partition.
     */
    private void initSchema() throws Exception
    {
        File workingDirectory = directoryService.getInstanceLayout().getPartitionsDirectory();

        // Extract the schema on disk (a brand new one) and load the registries
        File schemaRepository = new File( workingDirectory, "schema" );
        SchemaLdifExtractor extractor = new DefaultSchemaLdifExtractor( workingDirectory );
        try
        {
            extractor.extractOrCopy();
        }
        catch ( IOException ioe )
        {
            // The schema has already been extracted, bypass
        }

        SchemaLoader loader = new LdifSchemaLoader( schemaRepository );
        SchemaManager schemaManager = new DefaultSchemaManager( loader );

        // We have to load the schema now, otherwise we won't be able
        // to initialize the Partitions, as we won't be able to parse
        // and normalize their suffix Dn
        schemaManager.loadAllEnabled();

        // Tell all the normalizer comparators that they should not normalize anything
        ComparatorRegistry comparatorRegistry = schemaManager.getComparatorRegistry();
        for ( LdapComparator<?> comparator : comparatorRegistry )
        {
            if ( comparator instanceof NormalizingComparator)
            {
                ( ( NormalizingComparator ) comparator ).setOnServer();
            }
        }
        directoryService.setSchemaManager( schemaManager );

        // Init the LdifPartition
        LdifPartition ldifPartition = new LdifPartition( schemaManager, directoryService.getDnFactory() );
        ldifPartition.setPartitionPath( new File( workingDirectory, "schema" ).toURI() );
        SchemaPartition schemaPartition = new SchemaPartition( schemaManager );
        schemaPartition.setWrappedPartition( ldifPartition );
        directoryService.setSchemaPartition( schemaPartition );

        // Fail fast if any schema element could not be loaded.
        List<Throwable> errors = schemaManager.getErrors();
        if ( errors.size() != 0 )
        {
            throw new Exception( I18n.err(I18n.ERR_317, Exceptions.printErrors(errors)) );
        }
    }

    /**
     * Inits the system partition.
     *
     * @throws Exception the exception
     */
    private void initSystemPartition() throws Exception
    {
        // change the working directory to something that is unique
        // on the system and somewhere either under target directory
        // or somewhere in a temp area of the machine.

        // Inject the System Partition
        Partition systemPartition = partitionFactory.createPartition( directoryService.getSchemaManager(),
                directoryService.getDnFactory(),
                "system", ServerDNConstants.SYSTEM_DN, 500,
                new File( directoryService.getInstanceLayout().getPartitionsDirectory(), "system" ) );
        systemPartition.setSchemaManager(directoryService.getSchemaManager());
        partitionFactory.addIndex(systemPartition, SchemaConstants.OBJECT_CLASS_AT, 100 );
        directoryService.setSystemPartition( systemPartition );
    }

    /**
     * Builds the directory server instance.
     *
     * @param name the instance name
     */
    private void build( String name ) throws Exception
    {
        directoryService.setInstanceId( name );
        buildInstanceDirectory( name );

        CacheService cacheService = new CacheService();
        cacheService.initialize( directoryService.getInstanceLayout() );
        directoryService.setCacheService( cacheService );

        // Init the service now — order matters: schema first, then the system
        // partition, then startup.
        initSchema();
        initSystemPartition();
        directoryService.startup();
    }

    /**
     * {@inheritDoc}
     */
    public DirectoryService getDirectoryService() throws Exception
    {
        return directoryService;
    }

    /**
     * {@inheritDoc}
     */
    public PartitionFactory getPartitionFactory() throws Exception
    {
        return partitionFactory;
    }
}

View file

@@ -1,182 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.util.ldap;
import net.sf.ehcache.CacheManager;
import net.sf.ehcache.config.CacheConfiguration;
import net.sf.ehcache.config.Configuration;
import org.apache.commons.io.FileUtils;
import org.apache.directory.api.ldap.model.constants.SchemaConstants;
import org.apache.directory.api.ldap.model.schema.LdapComparator;
import org.apache.directory.api.ldap.model.schema.SchemaManager;
import org.apache.directory.api.ldap.model.schema.comparators.NormalizingComparator;
import org.apache.directory.api.ldap.model.schema.registries.ComparatorRegistry;
import org.apache.directory.api.ldap.model.schema.registries.SchemaLoader;
import org.apache.directory.api.ldap.schema.loader.JarLdifSchemaLoader;
import org.apache.directory.api.ldap.schema.manager.impl.DefaultSchemaManager;
import org.apache.directory.api.util.exception.Exceptions;
import org.apache.directory.server.constants.ServerDNConstants;
import org.apache.directory.server.core.DefaultDirectoryService;
import org.apache.directory.server.core.api.CacheService;
import org.apache.directory.server.core.api.DirectoryService;
import org.apache.directory.server.core.api.InstanceLayout;
import org.apache.directory.server.core.api.partition.Partition;
import org.apache.directory.server.core.api.schema.SchemaPartition;
import org.apache.directory.server.core.factory.AvlPartitionFactory;
import org.apache.directory.server.core.factory.DirectoryServiceFactory;
import org.apache.directory.server.core.factory.PartitionFactory;
import org.apache.directory.server.i18n.I18n;
import org.jboss.logging.Logger;
import java.io.File;
import java.io.IOException;
import java.util.List;
import org.apache.directory.server.core.api.interceptor.Interceptor;
import org.apache.directory.server.core.normalization.NormalizationInterceptor;
/**
* Factory for a fast (mostly in-memory-only) ApacheDS DirectoryService. Use only for tests!!
*
* @author Josef Cacek
*/
class InMemoryDirectoryServiceFactory implements DirectoryServiceFactory {

    private static final Logger log = Logger.getLogger(InMemoryDirectoryServiceFactory.class);

    /** Page size used for the "member" ranged-attribute emulation interceptor. */
    private static final int PAGE_SIZE = 30;

    private final DirectoryService directoryService;
    private final PartitionFactory partitionFactory;

    /**
     * Default constructor which creates {@link DefaultDirectoryService} instance and configures {@link AvlPartitionFactory} as
     * the {@link PartitionFactory} implementation.
     */
    public InMemoryDirectoryServiceFactory() {
        try {
            directoryService = new DefaultDirectoryService();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        // No shutdown hook: keeps test execution fast (the hook starts many threads).
        directoryService.setShutdownHookEnabled(false);
        partitionFactory = new AvlPartitionFactory();
    }

    /**
     * Constructor which uses provided {@link DirectoryService} and {@link PartitionFactory} implementations.
     *
     * @param directoryService must be not-<code>null</code>
     * @param partitionFactory must be not-<code>null</code>
     */
    public InMemoryDirectoryServiceFactory(DirectoryService directoryService, PartitionFactory partitionFactory) {
        this.directoryService = directoryService;
        this.partitionFactory = partitionFactory;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void init(String name) throws Exception {
        // Idempotent: a second init() on a started service is a no-op.
        if ((directoryService != null) && directoryService.isStarted()) {
            return;
        }
        directoryService.setInstanceId(name);

        // instance layout — unlike the file-based factory, any leftover directory
        // from a previous run is deleted (this factory is test-only, not persistent).
        InstanceLayout instanceLayout = new InstanceLayout(System.getProperty("java.io.tmpdir") + "/server-work-inmemory-" + name);
        if (instanceLayout.getInstanceDirectory().exists()) {
            try {
                FileUtils.deleteDirectory(instanceLayout.getInstanceDirectory());
            } catch (IOException e) {
                log.warn("couldn't delete the instance directory before initializing the DirectoryService", e);
            }
        }
        directoryService.setInstanceLayout(instanceLayout);

        // EhCache in disabled-like-mode: tiny (max 1 element), short-lived, no disk overflow.
        Configuration ehCacheConfig = new Configuration();
        ehCacheConfig.setName(name);
        CacheConfiguration defaultCache = new CacheConfiguration(name + "-default", 1).eternal(false).timeToIdleSeconds(30)
                .timeToLiveSeconds(30).overflowToDisk(false);
        ehCacheConfig.addDefaultCache(defaultCache);
        CacheService cacheService = new CacheService(new CacheManager(ehCacheConfig));
        directoryService.setCacheService(cacheService);

        // Init the schema — loaded from LDIF resources on the classpath, no disk extraction.
        // SchemaLoader loader = new SingleLdifSchemaLoader();
        SchemaLoader loader = new JarLdifSchemaLoader();
        SchemaManager schemaManager = new DefaultSchemaManager(loader);
        schemaManager.loadAllEnabled();
        // Tell the normalizing comparators they run on the server side (no normalization).
        ComparatorRegistry comparatorRegistry = schemaManager.getComparatorRegistry();
        for (LdapComparator<?> comparator : comparatorRegistry) {
            if (comparator instanceof NormalizingComparator) {
                ((NormalizingComparator) comparator).setOnServer();
            }
        }
        directoryService.setSchemaManager(schemaManager);
        InMemorySchemaPartition inMemorySchemaPartition = new InMemorySchemaPartition(schemaManager);
        SchemaPartition schemaPartition = new SchemaPartition(schemaManager);
        schemaPartition.setWrappedPartition(inMemorySchemaPartition);
        directoryService.setSchemaPartition(schemaPartition);
        // Fail fast if any schema element could not be loaded.
        List<Throwable> errors = schemaManager.getErrors();
        if (errors.size() != 0) {
            throw new Exception(I18n.err(I18n.ERR_317, Exceptions.printErrors(errors)));
        }

        // Init system partition
        Partition systemPartition = partitionFactory.createPartition(directoryService.getSchemaManager(),
                directoryService.getDnFactory(), "system",
                ServerDNConstants.SYSTEM_DN, 500,
                new File(directoryService.getInstanceLayout().getPartitionsDirectory(),
                        "system"));
        systemPartition.setSchemaManager(directoryService.getSchemaManager());
        partitionFactory.addIndex(systemPartition, SchemaConstants.OBJECT_CLASS_AT, 100);
        directoryService.setSystemPartition(systemPartition);

        // Find Normalization interceptor in chain and add our range emulated interceptor
        // directly after the LAST NormalizationInterceptor found (insertionPosition keeps
        // being overwritten inside the loop).
        List<Interceptor> interceptors = directoryService.getInterceptors();
        int insertionPosition = -1;
        for (int pos = 0; pos < interceptors.size(); ++pos) {
            Interceptor interceptor = interceptors.get(pos);
            if (interceptor instanceof NormalizationInterceptor) {
                insertionPosition = pos;
            }
        }
        interceptors.add(insertionPosition + 1, new RangedAttributeInterceptor("member", PAGE_SIZE));
        directoryService.setInterceptors(interceptors);

        directoryService.startup();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public DirectoryService getDirectoryService() throws Exception {
        return directoryService;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public PartitionFactory getPartitionFactory() throws Exception {
        return partitionFactory;
    }
}

View file

@@ -1,93 +0,0 @@
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.util.ldap;
import org.apache.directory.api.ldap.model.constants.SchemaConstants;
import org.apache.directory.api.ldap.model.entry.DefaultEntry;
import org.apache.directory.api.ldap.model.entry.Entry;
import org.apache.directory.api.ldap.model.ldif.LdifEntry;
import org.apache.directory.api.ldap.model.ldif.LdifReader;
import org.apache.directory.api.ldap.model.schema.SchemaManager;
import org.apache.directory.api.ldap.schema.extractor.impl.DefaultSchemaLdifExtractor;
import org.apache.directory.api.ldap.schema.extractor.impl.ResourceMap;
import org.apache.directory.server.core.api.interceptor.context.AddOperationContext;
import org.apache.directory.server.core.partition.ldif.AbstractLdifPartition;
import org.jboss.logging.Logger;
import javax.naming.InvalidNameException;
import java.net.URL;
import java.util.Map;
import java.util.TreeSet;
import java.util.UUID;
import java.util.regex.Pattern;
/**
* In-memory schema-only partition which loads the data in the similar way as the
* {@link org.apache.directory.api.ldap.schemaloader.JarLdifSchemaLoader}.
*
* @author Josef Cacek
*/
class InMemorySchemaPartition extends AbstractLdifPartition {

    private static final Logger log = Logger.getLogger(InMemorySchemaPartition.class);

    /**
     * Creates the schema partition; the schema entries themselves are loaded
     * later, in {@link #doInit()}.
     *
     * @param schemaManager schema manager the partition entries are validated against
     */
    public InMemorySchemaPartition(SchemaManager schemaManager) {
        super(schemaManager);
    }

    /**
     * Partition initialization - loads schema entries from the files on classpath.
     *
     * @see org.apache.directory.server.core.partition.impl.avl.AvlPartition#doInit()
     */
    @Override
    protected void doInit() throws InvalidNameException, Exception {
        if (initialized)
            return;

        log.debug("Initializing schema partition " + getId());
        suffixDn.apply(schemaManager);
        super.doInit();

        // load schema: every "schema/ou=schema..." LDIF resource on the classpath,
        // iterated in sorted (TreeSet) order so parent entries are added before children.
        final Map<String, Boolean> resMap = ResourceMap.getResources(Pattern.compile("schema[/\\Q\\\\E]ou=schema.*"));
        for (String resourcePath : new TreeSet<String>(resMap.keySet())) {
            if (resourcePath.endsWith(".ldif")) {
                URL resource = DefaultSchemaLdifExtractor.getUniqueResource(resourcePath, "Schema LDIF file");
                LdifReader reader = new LdifReader(resource.openStream());
                LdifEntry ldifEntry = reader.next();
                reader.close();

                Entry entry = new DefaultEntry(schemaManager, ldifEntry.getEntry());
                // add mandatory attributes (entryCSN / entryUUID) when the LDIF omits them
                if (entry.get(SchemaConstants.ENTRY_CSN_AT) == null) {
                    entry.add(SchemaConstants.ENTRY_CSN_AT, AbstractLdifPartition.defaultCSNFactory.newInstance().toString());
                }
                if (entry.get(SchemaConstants.ENTRY_UUID_AT) == null) {
                    entry.add(SchemaConstants.ENTRY_UUID_AT, UUID.randomUUID().toString());
                }
                AddOperationContext addContext = new AddOperationContext(null, entry);
                super.add(addContext);
            }
        }
    }
}

View file

@@ -26,9 +26,12 @@ import org.apache.directory.api.ldap.model.ldif.LdifEntry;
import org.apache.directory.api.ldap.model.ldif.LdifReader; import org.apache.directory.api.ldap.model.ldif.LdifReader;
import org.apache.directory.api.ldap.model.schema.SchemaManager; import org.apache.directory.api.ldap.model.schema.SchemaManager;
import org.apache.directory.server.core.api.DirectoryService; import org.apache.directory.server.core.api.DirectoryService;
import org.apache.directory.server.core.api.interceptor.Interceptor;
import org.apache.directory.server.core.api.partition.Partition; import org.apache.directory.server.core.api.partition.Partition;
import org.apache.directory.server.core.factory.DirectoryServiceFactory; import org.apache.directory.server.core.factory.AvlPartitionFactory;
import org.apache.directory.server.core.factory.PartitionFactory; import org.apache.directory.server.core.factory.DefaultDirectoryServiceFactory;
import org.apache.directory.server.core.factory.JdbmPartitionFactory;
import org.apache.directory.server.core.normalization.NormalizationInterceptor;
import org.apache.directory.server.ldap.LdapServer; import org.apache.directory.server.ldap.LdapServer;
import org.apache.directory.server.protocol.shared.transport.TcpTransport; import org.apache.directory.server.protocol.shared.transport.TcpTransport;
import org.apache.directory.server.protocol.shared.transport.Transport; import org.apache.directory.server.protocol.shared.transport.Transport;
@@ -39,6 +42,7 @@ import org.keycloak.common.util.StreamUtil;
import java.io.File; import java.io.File;
import java.io.InputStream; import java.io.InputStream;
import java.util.HashMap; import java.util.HashMap;
import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Properties; import java.util.Properties;
@@ -48,6 +52,7 @@ import java.util.Properties;
public class LDAPEmbeddedServer { public class LDAPEmbeddedServer {
private static final Logger log = Logger.getLogger(LDAPEmbeddedServer.class); private static final Logger log = Logger.getLogger(LDAPEmbeddedServer.class);
private static final int PAGE_SIZE = 30;
public static final String PROPERTY_BASE_DN = "ldap.baseDN"; public static final String PROPERTY_BASE_DN = "ldap.baseDN";
public static final String PROPERTY_BIND_HOST = "ldap.host"; public static final String PROPERTY_BIND_HOST = "ldap.host";
@@ -171,15 +176,15 @@ public class LDAPEmbeddedServer {
String dcName = baseDN.split(",")[0]; String dcName = baseDN.split(",")[0];
dcName = dcName.substring(dcName.indexOf("=") + 1); dcName = dcName.substring(dcName.indexOf("=") + 1);
DirectoryServiceFactory dsf;
if (this.directoryServiceFactory.equals(DSF_INMEMORY)) { if (this.directoryServiceFactory.equals(DSF_INMEMORY)) {
dsf = new InMemoryDirectoryServiceFactory(); System.setProperty( "apacheds.partition.factory", AvlPartitionFactory.class.getName());
} else if (this.directoryServiceFactory.equals(DSF_FILE)) { } else if (this.directoryServiceFactory.equals(DSF_FILE)) {
dsf = new FileDirectoryServiceFactory(); System.setProperty( "apacheds.partition.factory", JdbmPartitionFactory.class.getName());
} else { } else {
throw new IllegalStateException("Unknown value of directoryServiceFactory: " + this.directoryServiceFactory); throw new IllegalStateException("Unknown value of directoryServiceFactory: " + this.directoryServiceFactory);
} }
DefaultDirectoryServiceFactory dsf = new DefaultDirectoryServiceFactory();
DirectoryService service = dsf.getDirectoryService(); DirectoryService service = dsf.getDirectoryService();
service.setAccessControlEnabled(false); service.setAccessControlEnabled(false);
service.setAllowAnonymousAccess(false); service.setAllowAnonymousAccess(false);
@@ -187,20 +192,16 @@ public class LDAPEmbeddedServer {
dsf.init(dcName + "DS"); dsf.init(dcName + "DS");
SchemaManager schemaManager = service.getSchemaManager(); Partition partition = dsf.getPartitionFactory().createPartition(
service.getSchemaManager(),
PartitionFactory partitionFactory = dsf.getPartitionFactory();
Partition partition = partitionFactory.createPartition(
schemaManager,
service.getDnFactory(), service.getDnFactory(),
dcName, dcName,
this.baseDN, this.baseDN,
1000, 1000,
new File(service.getInstanceLayout().getPartitionsDirectory(), dcName)); new File(service.getInstanceLayout().getPartitionsDirectory(), dcName));
partition.setCacheService( service.getCacheService() );
partition.initialize(); partition.initialize();
partition.setSchemaManager( schemaManager ); partition.setSchemaManager(service.getSchemaManager());
// Inject the partition into the DirectoryService // Inject the partition into the DirectoryService
service.addPartition( partition ); service.addPartition( partition );
@@ -213,6 +214,23 @@ public class LDAPEmbeddedServer {
"objectClass: domain\n\n"; "objectClass: domain\n\n";
importLdifContent(service, entryLdif); importLdifContent(service, entryLdif);
if (this.directoryServiceFactory.equals(DSF_INMEMORY)) {
// Find Normalization interceptor in chain and add our range emulated interceptor
List<Interceptor> interceptors = service.getInterceptors();
int insertionPosition = -1;
for (int pos = 0; pos < interceptors.size(); ++pos) {
Interceptor interceptor = interceptors.get(pos);
if (interceptor instanceof NormalizationInterceptor) {
insertionPosition = pos;
}
}
RangedAttributeInterceptor interceptor = new RangedAttributeInterceptor("member", PAGE_SIZE);
interceptor.init(service);
interceptors.add(insertionPosition + 1, interceptor);
service.setInterceptors(interceptors);
}
return service; return service;
} }

View file

@@ -73,16 +73,16 @@ public class RangedAttributeInterceptor extends BaseInterceptor {
int end = (max != null && max < attr.size() - 1)? max : attr.size() - 1; int end = (max != null && max < attr.size() - 1)? max : attr.size() - 1;
if (start != 0 || end != attr.size() - 1) { if (start != 0 || end != attr.size() - 1) {
// some values should be stripped out // some values should be stripped out
Iterator<Value<?>> it = attr.iterator(); Iterator<Value> it = attr.iterator();
Set<Value<?>> valuesToRemove = new HashSet<>(end - start + 1); Set<Value> valuesToRemove = new HashSet<>(end - start + 1);
for (int i = 0; i < attr.size(); i++) { for (int i = 0; i < attr.size(); i++) {
Value<?> v = it.next(); Value v = it.next();
if (i < start || i > end) { if (i < start || i > end) {
valuesToRemove.add(v); valuesToRemove.add(v);
} }
} }
attr.setUpId(attr.getUpId() + ";range=" + start + "-" + ((end == attr.size() - 1)? "*" : end)); attr.setUpId(attr.getUpId() + ";range=" + start + "-" + ((end == attr.size() - 1)? "*" : end));
attr.remove(valuesToRemove.toArray(new Value<?>[0])); attr.remove(valuesToRemove.toArray(new Value[0]));
} else if (min != null) { } else if (min != null) {
// range explicitly requested although no value stripped // range explicitly requested although no value stripped
attr.setUpId(attr.getUpId() + ";range=0-*"); attr.setUpId(attr.getUpId() + ";range=0-*");