Compare commits
95 commits
eee387c0ab
...
9bf20bb2cd
Author | SHA1 | Date | |
---|---|---|---|
9bf20bb2cd | |||
|
5ba1efc858 | ||
|
a2ba3c8ace | ||
|
b1ff9511d1 | ||
|
33cae33ae4 | ||
|
226daa41c7 | ||
|
fec661cf10 | ||
|
d2e19da64e | ||
|
b2930a4799 | ||
|
a9c3e592f3 | ||
|
a8d9a5553f | ||
|
ce454bda47 | ||
|
b44aee7535 | ||
|
65e90d2ff4 | ||
|
36defd5f33 | ||
|
8853a942f9 | ||
|
927f110aef | ||
|
bd1a5a1543 | ||
|
35b425736a | ||
|
1718a3ee94 | ||
|
9851452be1 | ||
|
6482e41cd8 | ||
|
7d70ea7c20 | ||
|
cd86405064 | ||
|
fd97f9c7d7 | ||
|
8b1cdb1fc3 | ||
|
5b6ac5b14b | ||
|
b3dd26a7c3 | ||
|
d6b01015c4 | ||
|
612e2caae1 | ||
|
25e4995eb7 | ||
|
cb38ad10ea | ||
|
440624e398 | ||
|
373656593d | ||
|
e8543e77d2 | ||
|
3315ea718a | ||
|
f229790ba5 | ||
|
822d3fde32 | ||
|
8be4237fd4 | ||
|
8855cf2316 | ||
|
f8df8e1c9a | ||
|
910caf5ff8 | ||
|
1a038af507 | ||
|
07464b11de | ||
|
fb64e3ba5f | ||
|
81950f5d17 | ||
|
2b4fbfe66b | ||
|
e4101b1b61 | ||
|
7681687e0a | ||
|
d80cb010ff | ||
|
af434d6bc1 | ||
|
2e51775acc | ||
|
9a7cfb38ac | ||
|
a7af380f71 | ||
|
e72da1ac2c | ||
|
53cfcdc273 | ||
|
1d8b61b991 | ||
|
d853dcab7d | ||
|
36b01cbea0 | ||
|
ba51140a25 | ||
|
4e540fa2a7 | ||
|
db780ed6c7 | ||
|
9c50813bf4 | ||
|
78aa08941a | ||
|
a79b67cac8 | ||
|
19ef0a608b | ||
|
0d9d2908f1 | ||
|
98a4faf289 | ||
|
c64e0ad583 | ||
|
abb7c414ab | ||
|
7e470e81f8 | ||
|
a76f9096e8 | ||
|
4ad462fbd3 | ||
|
ac25844731 | ||
|
b27a5d05b4 | ||
|
f9f9a313b3 | ||
|
35b109b4eb | ||
|
7368104e43 | ||
|
77231bd68c | ||
|
3d91df42d8 | ||
|
8c2bc39418 | ||
|
b4caeee0c7 | ||
|
eb5afeeabb | ||
|
fd2338c4fc | ||
|
7152a8b0f3 | ||
|
7bbc35cba7 | ||
|
3c727a32f4 | ||
|
617cadb84b | ||
|
6af682a897 | ||
|
87c87face7 | ||
|
97727dbed5 | ||
|
64f97be053 | ||
|
e41ca1f579 | ||
|
3d663802bb | ||
|
de973de800 |
|
@ -7,7 +7,7 @@ inputs:
|
|||
release-branches:
|
||||
description: 'List of all related release branches (in JSON format)'
|
||||
required: false
|
||||
default: '["refs/heads/release/22.0","refs/heads/release/24.0"]'
|
||||
default: '["refs/heads/release/22.0","refs/heads/release/24.0","refs/heads/release/26.0"]'
|
||||
keep-days:
|
||||
description: 'For how many days to store the particular artifact.'
|
||||
required: false
|
||||
|
|
6
.github/actions/build-keycloak/action.yml
vendored
|
@ -24,9 +24,9 @@ runs:
|
|||
with:
|
||||
create-cache-if-it-doesnt-exist: true
|
||||
|
||||
- id: frontend-plugin-cache
|
||||
name: Frontend Plugin Cache
|
||||
uses: ./.github/actions/frontend-plugin-cache
|
||||
- id: pnpm-store-cache
|
||||
name: PNPM store cache
|
||||
uses: ./.github/actions/pnpm-store-cache
|
||||
|
||||
- id: build-keycloak
|
||||
name: Build Keycloak
|
||||
|
|
3
.github/actions/conditional/action.yml
vendored
|
@ -22,9 +22,6 @@ outputs:
|
|||
ci-webauthn:
|
||||
description: Should "ci.yml" execute (WebAuthn)
|
||||
value: ${{ steps.changes.outputs.ci-webauthn }}
|
||||
ci-test-poc:
|
||||
description: Should "ci.yml" execute (Test PoC)
|
||||
value: ${{ steps.changes.outputs.ci-test-poc }}
|
||||
operator:
|
||||
description: Should "operator-ci.yml" execute
|
||||
value: ${{ steps.changes.outputs.operator }}
|
||||
|
|
2
.github/actions/conditional/conditions
vendored
|
@ -60,5 +60,3 @@ js/libs/keycloak-js/ ci ci-quarkus
|
|||
*.tsx codeql-typescript
|
||||
|
||||
testsuite::database-suite ci-store
|
||||
|
||||
test-poc/ ci ci-test-poc
|
20
.github/actions/cypress-cache/action.yml
vendored
Normal file
|
@ -0,0 +1,20 @@
|
|||
name: Cache Cypress
|
||||
description: Caches Cypress binary to speed up the build.
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- id: cache-key
|
||||
name: Cache key based on Cypress version
|
||||
shell: bash
|
||||
run: echo "key=cypress-binary-$(jq -r '.devDependencies.cypress' js/apps/admin-ui/package.json)" >> $GITHUB_OUTPUT
|
||||
|
||||
- uses: actions/cache@v4
|
||||
name: Cache Cypress binary
|
||||
with:
|
||||
# See: https://docs.cypress.io/app/references/advanced-installation#Binary-cache
|
||||
path: |
|
||||
~/.cache/Cypress
|
||||
/AppData/Local/Cypress/Cache
|
||||
~/Library/Caches/Cypress
|
||||
key: ${{ runner.os }}-${{ steps.cache-key.outputs.key }}
|
21
.github/actions/frontend-plugin-cache/action.yml
vendored
|
@ -1,21 +0,0 @@
|
|||
name: Frontend Plugin Cache
|
||||
description: Caches NPM dependencies for the frontend-maven-plugin to speed up builds
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- name: Get PNPM version
|
||||
id: pnpm-version
|
||||
shell: bash
|
||||
run: |
|
||||
echo "version=$(./mvnw help:evaluate -Dexpression=pnpm.version -q -DforceStdout)" >> $GITHUB_OUTPUT
|
||||
|
||||
- uses: actions/cache@v4
|
||||
name: Cache PNPM store
|
||||
with:
|
||||
# See: https://pnpm.io/npmrc#store-dir
|
||||
path: |
|
||||
~/.local/share/pnpm/store
|
||||
~/AppData/Local/pnpm/store
|
||||
~/Library/pnpm/store
|
||||
key: ${{ runner.os }}-frontend-plugin-pnpm-store-${{ steps.pnpm-version.outputs.version }}-${{ hashFiles('pnpm-lock.yaml') }}
|
|
@ -25,9 +25,9 @@ runs:
|
|||
name: Maven cache
|
||||
uses: ./.github/actions/maven-cache
|
||||
|
||||
- id: frontend-plugin-cache
|
||||
name: Frontend Plugin Cache
|
||||
uses: ./.github/actions/frontend-plugin-cache
|
||||
- id: pnpm-store-cache
|
||||
name: PNPM store cache
|
||||
uses: ./.github/actions/pnpm-store-cache
|
||||
|
||||
- id: download-keycloak
|
||||
name: Download Keycloak Maven artifacts
|
||||
|
|
31
.github/actions/pnpm-setup/action.yml
vendored
|
@ -20,26 +20,19 @@ runs:
|
|||
shell: bash
|
||||
run: corepack enable
|
||||
|
||||
- name: Get PNPM store directory
|
||||
id: pnpm-cache
|
||||
shell: bash
|
||||
run: |
|
||||
echo "store-path=$(pnpm store path --silent)" >> $GITHUB_OUTPUT
|
||||
- name: PNPM store cache
|
||||
uses: ./.github/actions/pnpm-store-cache
|
||||
|
||||
- uses: actions/cache@v4
|
||||
name: Setup PNPM cache
|
||||
with:
|
||||
# Also cache Cypress binary.
|
||||
path: |
|
||||
~/.cache/Cypress
|
||||
${{ steps.pnpm-cache.outputs.store-path }}
|
||||
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('pnpm-lock.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pnpm-store-
|
||||
- name: Cypress binary cache
|
||||
uses: ./.github/actions/cypress-cache
|
||||
|
||||
- name: Install dependencies
|
||||
shell: bash
|
||||
# Run the store prune after the installation to avoid having caches which grow over time
|
||||
run: |
|
||||
pnpm install --prefer-offline --frozen-lockfile
|
||||
pnpm store prune
|
||||
run: pnpm install --prefer-offline --frozen-lockfile
|
||||
|
||||
# This step is only needed to ensure that the Cypress binary is installed.
|
||||
# If the binary was retrieved from the cache, this step is a no-op.
|
||||
- name: Install Cypress dependencies
|
||||
shell: bash
|
||||
working-directory: js/apps/admin-ui
|
||||
run: pnpm exec cypress install
|
||||
|
|
20
.github/actions/pnpm-store-cache/action.yml
vendored
Normal file
|
@ -0,0 +1,20 @@
|
|||
name: Cache PNPM store
|
||||
description: Caches the PNPM store to speed up the build.
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- id: weekly-cache-key
|
||||
name: Key for weekly rotation of cache
|
||||
shell: bash
|
||||
run: echo "key=pnpm-store-`date -u "+%Y-%U"`" >> $GITHUB_OUTPUT
|
||||
|
||||
- uses: actions/cache@v4
|
||||
name: Cache PNPM store
|
||||
with:
|
||||
# See: https://pnpm.io/npmrc#store-dir
|
||||
path: |
|
||||
~/.local/share/pnpm/store
|
||||
~/AppData/Local/pnpm/store
|
||||
~/Library/pnpm/store
|
||||
key: ${{ runner.os }}-${{ steps.weekly-cache-key.outputs.key }}
|
4
.github/actions/unit-test-setup/action.yml
vendored
|
@ -11,6 +11,6 @@ runs:
|
|||
name: Maven cache
|
||||
uses: ./.github/actions/maven-cache
|
||||
|
||||
- id: frontend-plugin-cache
|
||||
- id: pnpm-store-cache
|
||||
name: Frontend Plugin Cache
|
||||
uses: ./.github/actions/frontend-plugin-cache
|
||||
uses: ./.github/actions/pnpm-store-cache
|
||||
|
|
20
.github/workflows/ci.yml
vendored
|
@ -33,7 +33,6 @@ jobs:
|
|||
ci-store: ${{ steps.conditional.outputs.ci-store }}
|
||||
ci-sssd: ${{ steps.conditional.outputs.ci-sssd }}
|
||||
ci-webauthn: ${{ steps.conditional.outputs.ci-webauthn }}
|
||||
ci-test-poc: ${{ steps.conditional.outputs.ci-test-poc }}
|
||||
ci-aurora: ${{ steps.auroradb-tests.outputs.run-aurora-tests }}
|
||||
|
||||
steps:
|
||||
|
@ -84,7 +83,7 @@ jobs:
|
|||
run: |
|
||||
SEP=""
|
||||
PROJECTS=""
|
||||
for i in `find -name '*Test.java' -type f | egrep -v './(testsuite|quarkus|docs|test-poc|test-framework)/' | sed 's|/src/test/java/.*||' | sort | uniq | sed 's|./||'`; do
|
||||
for i in `find -name '*Test.java' -type f | egrep -v './(testsuite|quarkus|docs|tests|test-framework)/' | sed 's|/src/test/java/.*||' | sort | uniq | sed 's|./||'`; do
|
||||
PROJECTS="$PROJECTS$SEP$i"
|
||||
SEP=","
|
||||
done
|
||||
|
@ -958,7 +957,7 @@ jobs:
|
|||
job-id: migration-tests-${{ matrix.old-version }}-${{ matrix.database }}
|
||||
|
||||
test-framework:
|
||||
name: Keycloak Test Framework
|
||||
name: Test Framework
|
||||
runs-on: ubuntu-latest
|
||||
needs: build
|
||||
timeout-minutes: 30
|
||||
|
@ -970,14 +969,12 @@ jobs:
|
|||
uses: ./.github/actions/integration-test-setup
|
||||
|
||||
- name: Run tests
|
||||
run: ./mvnw test -f test-framework/pom.xml
|
||||
run: ./mvnw package -f test-framework/pom.xml
|
||||
|
||||
test-poc:
|
||||
name: Test PoC
|
||||
base-new-integration-tests:
|
||||
name: Base IT (new)
|
||||
runs-on: ubuntu-latest
|
||||
if: needs.conditional.outputs.ci-test-poc == 'true'
|
||||
needs:
|
||||
- conditional
|
||||
- build
|
||||
timeout-minutes: 30
|
||||
steps:
|
||||
|
@ -988,9 +985,7 @@ jobs:
|
|||
uses: ./.github/actions/integration-test-setup
|
||||
|
||||
- name: Run tests
|
||||
env:
|
||||
KC_TEST_BROWSER: chrome-headless
|
||||
run: ./mvnw clean install -f test-poc/pom.xml
|
||||
run: ./mvnw test -f tests/pom.xml
|
||||
|
||||
check:
|
||||
name: Status Check - Keycloak CI
|
||||
|
@ -1015,7 +1010,8 @@ jobs:
|
|||
- sssd-unit-tests
|
||||
- migration-tests
|
||||
- external-infinispan-tests
|
||||
- test-poc
|
||||
- test-framework
|
||||
- base-new-integration-tests
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
|
2
.github/workflows/js-ci.yml
vendored
|
@ -240,7 +240,7 @@ jobs:
|
|||
- name: Start Keycloak server
|
||||
run: |
|
||||
tar xfvz keycloak-999.0.0-SNAPSHOT.tar.gz
|
||||
keycloak-999.0.0-SNAPSHOT/bin/kc.sh start-dev --features=admin-fine-grained-authz,transient-users &> ~/server.log &
|
||||
keycloak-999.0.0-SNAPSHOT/bin/kc.sh start-dev --features=admin-fine-grained-authz:v1,transient-users &> ~/server.log &
|
||||
env:
|
||||
KC_BOOTSTRAP_ADMIN_USERNAME: admin
|
||||
KC_BOOTSTRAP_ADMIN_PASSWORD: admin
|
||||
|
|
|
@ -53,7 +53,9 @@ public class Profile {
|
|||
|
||||
ACCOUNT_V3("Account Console version 3", Type.DEFAULT, 3, Feature.ACCOUNT_API),
|
||||
|
||||
ADMIN_FINE_GRAINED_AUTHZ("Fine-Grained Admin Permissions", Type.PREVIEW),
|
||||
ADMIN_FINE_GRAINED_AUTHZ("Fine-Grained Admin Permissions", Type.PREVIEW, 1),
|
||||
|
||||
ADMIN_FINE_GRAINED_AUTHZ_V2("Fine-Grained Admin Permissions version 2", Type.EXPERIMENTAL, 2, Feature.AUTHORIZATION),
|
||||
|
||||
ADMIN_API("Admin API", Type.DEFAULT),
|
||||
|
||||
|
@ -112,6 +114,8 @@ public class Profile {
|
|||
|
||||
OID4VC_VCI("Support for the OID4VCI protocol as part of OID4VC.", Type.EXPERIMENTAL),
|
||||
|
||||
SCIM("Synchronise users and groups with registered SCIM endpoints", Type.EXPERIMENTAL),
|
||||
|
||||
OPENTELEMETRY("OpenTelemetry Tracing", Type.PREVIEW),
|
||||
|
||||
DECLARATIVE_UI("declarative ui spi", Type.EXPERIMENTAL),
|
||||
|
@ -121,6 +125,8 @@ public class Profile {
|
|||
PASSKEYS("Passkeys", Type.PREVIEW),
|
||||
|
||||
CACHE_EMBEDDED_REMOTE_STORE("Support for remote-store in embedded Infinispan caches", Type.EXPERIMENTAL),
|
||||
|
||||
USER_EVENT_METRICS("Collect metrics based on user events", Type.PREVIEW),
|
||||
;
|
||||
|
||||
private final Type type;
|
||||
|
@ -335,13 +341,13 @@ public class Profile {
|
|||
*/
|
||||
private static Map<String, TreeSet<Feature>> getOrderedFeatures() {
|
||||
if (FEATURES == null) {
|
||||
// "natural" ordering low to high between two features
|
||||
Comparator<Feature> comparator = Comparator.comparing(Feature::getType).thenComparingInt(Feature::getVersion);
|
||||
// "natural" ordering low to high between two features (type has precedence and then reversed version is used)
|
||||
Comparator<Feature> comparator = Comparator.comparing(Feature::getType).thenComparing(Comparator.comparingInt(Feature::getVersion).reversed());
|
||||
// aggregate the features by unversioned key
|
||||
HashMap<String, TreeSet<Feature>> features = new HashMap<>();
|
||||
Stream.of(Feature.values()).forEach(f -> features.compute(f.getUnversionedKey(), (k, v) -> {
|
||||
if (v == null) {
|
||||
v = new TreeSet<>(comparator.reversed()); // we want the highest priority first
|
||||
v = new TreeSet<>(comparator);
|
||||
}
|
||||
v.add(f);
|
||||
return v;
|
||||
|
|
|
@ -35,4 +35,6 @@ public interface ServiceAccountConstants {
|
|||
String CLIENT_HOST = "clientHost";
|
||||
String CLIENT_ADDRESS = "clientAddress";
|
||||
|
||||
String SERVICE_ACCOUNT_SCOPE = "service_account";
|
||||
|
||||
}
|
||||
|
|
|
@ -29,7 +29,7 @@ public class ProfileTest {
|
|||
|
||||
private static final Profile.Feature DEFAULT_FEATURE = Profile.Feature.AUTHORIZATION;
|
||||
private static final Profile.Feature DISABLED_BY_DEFAULT_FEATURE = Profile.Feature.DOCKER;
|
||||
private static final Profile.Feature PREVIEW_FEATURE = Profile.Feature.ADMIN_FINE_GRAINED_AUTHZ;
|
||||
private static final Profile.Feature PREVIEW_FEATURE = Profile.Feature.TOKEN_EXCHANGE;
|
||||
private static final Profile.Feature EXPERIMENTAL_FEATURE = Profile.Feature.DYNAMIC_SCOPES;
|
||||
private static Profile.Feature DEPRECATED_FEATURE = Profile.Feature.LOGIN_V1;
|
||||
|
||||
|
|
|
@ -218,6 +218,8 @@ public class RealmRepresentation {
|
|||
protected Boolean organizationsEnabled;
|
||||
private List<OrganizationRepresentation> organizations;
|
||||
|
||||
protected Boolean verifiableCredentialsEnabled;
|
||||
|
||||
@Deprecated
|
||||
protected Boolean social;
|
||||
@Deprecated
|
||||
|
@ -1440,6 +1442,14 @@ public class RealmRepresentation {
|
|||
this.organizationsEnabled = organizationsEnabled;
|
||||
}
|
||||
|
||||
public Boolean isVerifiableCredentialsEnabled() {
|
||||
return verifiableCredentialsEnabled;
|
||||
}
|
||||
|
||||
public void setVerifiableCredentialsEnabled(Boolean verifiableCredentialsEnabled) {
|
||||
this.verifiableCredentialsEnabled = verifiableCredentialsEnabled;
|
||||
}
|
||||
|
||||
@JsonIgnore
|
||||
public Map<String, String> getAttributesOrEmpty() {
|
||||
return (Map<String, String>) (attributes == null ? Collections.emptyMap() : attributes);
|
||||
|
|
6
dependencies/server-all/pom.xml
vendored
|
@ -70,6 +70,12 @@
|
|||
<groupId>org.keycloak</groupId>
|
||||
<artifactId>keycloak-kerberos-federation</artifactId>
|
||||
</dependency>
|
||||
<!-- SCIM federation -->
|
||||
<dependency>
|
||||
<groupId>org.keycloak</groupId>
|
||||
<artifactId>keycloak-scim-federation</artifactId>
|
||||
<scope>compile</scope>
|
||||
</dependency>
|
||||
<!-- saml -->
|
||||
<dependency>
|
||||
<groupId>org.keycloak</groupId>
|
||||
|
|
|
@ -8,3 +8,13 @@ If you are using a custom theme that extends any of the `keycloak` themes and ar
|
|||
----
|
||||
darkMode=false
|
||||
----
|
||||
|
||||
Alternatively, you can disable dark mode support for the built-in Keycloak themes on a per-realm basis by turning off the "Dark mode" setting under the "Theme" tab in the realm settings.
|
||||
|
||||
= LDAP users are created as enabled by default when using Microsoft Active Directory
|
||||
|
||||
If you are using Microsoft AD and creating users through the administrative interfaces, the user will created as enabled by default.
|
||||
|
||||
In previous versions, it was only possible to update the user status after setting a (non-temporary) password to the user.
|
||||
This behavior was not consistent with other built-in user storages as well as not consistent with others LDAP vendors supported
|
||||
by the LDAP provider.
|
||||
|
|
BIN
docs/documentation/server_admin/images/brute-force-mixed.png
Normal file
After Width: | Height: | Size: 319 KiB |
After Width: | Height: | Size: 219 KiB |
After Width: | Height: | Size: 297 KiB |
Before Width: | Height: | Size: 72 KiB After Width: | Height: | Size: 210 KiB |
After Width: | Height: | Size: 81 KiB |
After Width: | Height: | Size: 104 KiB |
|
@ -67,6 +67,7 @@ include::topics/threat.adoc[]
|
|||
include::topics/threat/host.adoc[]
|
||||
include::topics/threat/admin.adoc[]
|
||||
include::topics/threat/brute-force.adoc[]
|
||||
include::topics/threat/password.adoc[]
|
||||
include::topics/threat/read-only-attributes.adoc[]
|
||||
include::topics/threat/validate-user-attributes.adoc[]
|
||||
include::topics/threat/clickjacking.adoc[]
|
||||
|
|
|
@ -2,68 +2,110 @@
|
|||
[[password-guess-brute-force-attacks]]
|
||||
=== Brute force attacks
|
||||
|
||||
A brute force attack attempts to guess a user's password by trying to log in multiple times. {project_name} has brute force detection capabilities and can temporarily disable a user account if the number of login failures exceeds a specified threshold.
|
||||
A brute force attack attempts to guess a user's password by trying to log in multiple times. {project_name} has brute force detection capabilities and can permanently or temporarily disable a user account if the number of login failures exceeds a specified threshold.
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
{project_name} disables brute force detection by default. Enable this feature to protect against brute force attacks.
|
||||
When a user is locked and attempts to log in, {project_name} displays the default `Invalid username or password` error message. This message is the same error message as the message displayed for an invalid username or invalid password to ensure the attacker is unaware the account is disabled.
|
||||
====
|
||||
|
||||
.Procedure
|
||||
[WARNING]
|
||||
====
|
||||
Brute force detection is disabled by default. Enable this feature to protect against brute force attacks.
|
||||
====
|
||||
|
||||
To enable this protection:
|
||||
|
||||
. Click *Realm Settings* in the menu
|
||||
. Click the *Security Defenses* tab.
|
||||
. Click the *Brute Force Detection* tab.
|
||||
. Choose the *Brute Force Mode* which best fit to your requirements.
|
||||
+
|
||||
.Brute force detection
|
||||
image:images/brute-force.png[]
|
||||
|
||||
{project_name} can deploy permanent lockout and temporary lockout actions when it detects an attack. Permanent lockout disables a user account until an administrator re-enables it. Temporary lockout disables a user account for a specific period of time.
|
||||
The time period that the account is disabled increases as the attack continues and subsequent failures reach multiples of `Max Login Failures`.
|
||||
==== Lockout permanently
|
||||
{project_name} disables a user account (blocking log in attemps) until an administrator re-enables it.
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
When a user is temporarily locked and attempts to log in, {project_name} displays the default `Invalid username or password` error message. This message is the same error message as the message displayed for an invalid username or invalid password to ensure the attacker is unaware the account is disabled.
|
||||
====
|
||||
.Lockout permanently
|
||||
image:images/brute-force-permanently.png[]
|
||||
|
||||
*Common Parameters*
|
||||
*Permanent Lockout Parameters*
|
||||
|
||||
|===
|
||||
|Name |Description |Default
|
||||
|
||||
|Max Login Failures
|
||||
|The maximum number of login failures.
|
||||
|30 failures.
|
||||
|30 failures
|
||||
|
||||
|Quick Login Check Milliseconds
|
||||
|The minimum time between login attempts.
|
||||
|1000 milliseconds.
|
||||
|1000 milliseconds
|
||||
|
||||
|Minimum Quick Login Wait
|
||||
|The minimum time the user is disabled when login attempts are quicker than _Quick Login Check Milliseconds_.
|
||||
|1 minute.
|
||||
|1 minute
|
||||
|
||||
|===
|
||||
|
||||
*Permanent Lockout Flow*
|
||||
|
||||
====
|
||||
. On successful login
|
||||
.. Reset `count`
|
||||
. On failed login
|
||||
.. Increment `count`
|
||||
.. If `count` is greater than or equals to `Max login failures`
|
||||
... locks the user
|
||||
.. Else if the time between this failure and the last failure is less than _Quick Login Check Milliseconds_
|
||||
... Locks the user for the time specified at _Minimum Quick Login Wait_
|
||||
====
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
Enabling an user account resets the `count`.
|
||||
====
|
||||
|
||||
==== Lockout temporarily
|
||||
{project_name} disables a user account for a specific period of time. The time period that the account is disabled increases as the attack continues.
|
||||
|
||||
.Lockout temporarily
|
||||
image:images/brute-force-temporarily.png[]
|
||||
|
||||
*Temporary Lockout Parameters*
|
||||
|
||||
|===
|
||||
|Name |Description |Default
|
||||
|
||||
|Max Login Failures
|
||||
|The maximum number of login failures.
|
||||
|30 failures
|
||||
|
||||
|Strategy to increase wait time
|
||||
|Strategy to increase the time a user will be temporarily disabled when the user's login attempts exceed _Max Login Failures_
|
||||
|Multiple
|
||||
|
||||
|Wait Increment
|
||||
|The time added to the time a user is temporarily disabled when the user's login attempts exceed _Max Login Failures_.
|
||||
|1 minute.
|
||||
|1 minute
|
||||
|
||||
|Max Wait
|
||||
|The maximum time a user is temporarily disabled.
|
||||
|15 minutes.
|
||||
|15 minutes
|
||||
|
||||
|Failure Reset Time
|
||||
|The time when the failure count resets. The timer runs from the last failed login. Make sure this number is always greater than `Max wait`; otherwise the effective
|
||||
wait time will never reach the value you have set to `Max wait`.
|
||||
|12 hours.
|
||||
|12 hours
|
||||
|
||||
|Quick Login Check Milliseconds
|
||||
|The minimum time between login attempts.
|
||||
|1000 milliseconds
|
||||
|
||||
|Minimum Quick Login Wait
|
||||
|The minimum time the user is disabled when login attempts are quicker than _Quick Login Check Milliseconds_.
|
||||
|1 minute
|
||||
|
||||
|===
|
||||
|
||||
|
@ -76,10 +118,15 @@ wait time will never reach the value you have set to `Max wait`.
|
|||
... Reset `count`
|
||||
.. Increment `count`
|
||||
.. Calculate `wait` according the brute force strategy defined (see below Strategies to set Wait Time).
|
||||
.. If `wait` equals is less than 0 and the time between this failure and the last failure is less than _Quick Login Check Milliseconds_, set `wait` to _Minimum Quick Login Wait_.
|
||||
.. If `wait` is less than or equals to 0 and the time between this failure and the last failure is less than _Quick Login Check Milliseconds_
|
||||
... set `wait` to _Minimum Quick Login Wait_
|
||||
.. if `wait` is greater than 0
|
||||
... Temporarily disable the user for the smallest of `wait` and _Max Wait_ seconds
|
||||
... Increment the temporary lockout counter
|
||||
|
||||
====
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
`count` does not increment when a temporarily disabled account commits a login failure.
|
||||
====
|
||||
|
||||
|
@ -104,11 +151,11 @@ By multiples strategy, wait time is incremented when the number (or count) of fa
|
|||
|**10** |**30** | 5 | **60**
|
||||
|===
|
||||
|
||||
At the fifth failed attempt of the `Effective Wait Time`, the account is disabled for `30` seconds. After reaching the next multiple of `Max Login Failures`, in this case `10`, the time increases from `30` to `60` seconds.
|
||||
At the fifth failed attempt, the account is disabled for `30` seconds. After reaching the next multiple of `Max Login Failures`, in this case `10`, the time increases from `30` to `60` seconds.
|
||||
|
||||
The By multiple strategy uses the following formula to calculate wait time: _Wait Increment_ * (`count` / _Max Login Failures_). The division is an integer division rounded down to a whole number.
|
||||
The By multiple strategy uses the following formula to calculate wait time: _Wait Increment in Seconds_ * (`count` / _Max Login Failures_). The division is an integer division rounded down to a whole number.
|
||||
|
||||
For linear strategy, wait time is incremented when the number (or count) of failures equals or is greater than `Max Login Failure`. For instance, if you have set `Max Login Failures` to `5` and a `Wait Increment` to`30` seconds, the effective time that an account is disabled after several failed authentication attempts will be:
|
||||
For linear strategy, wait time is incremented when the `count` (or number) of failures is greater than or equals to `Max Login Failure`. For instance, if you have set `Max Login Failures` to `5` and a `Wait Increment` to`30` seconds, the effective time that an account is disabled after several failed authentication attempts will be:
|
||||
|
||||
[cols="1,1,1,1"]
|
||||
|===
|
||||
|
@ -125,33 +172,88 @@ For linear strategy, wait time is incremented when the number (or count) of fail
|
|||
|**10** |**30** | 5 | **180**
|
||||
|===
|
||||
|
||||
At the fifth failed attempt for the `Effective Wait Time`, the account is disabled for `30` seconds. Each new failed attempt increases wait time.
|
||||
At the fifth failed attempt, the account is disabled for `30` seconds. Each new failure increases wait time according value specified at `wait increment`.
|
||||
|
||||
The linear strategy uses the following formula to calculate wait time: _Wait Increment_ * (1 + `count` - _Max Login Failures_).
|
||||
The linear strategy uses the following formula to calculate wait time: _Wait Increment in Seconds_ * (1 + `count` - _Max Login Failures_).
|
||||
|
||||
*Permanent Lockout Parameters*
|
||||
==== Lockout permanently after temporary lockout
|
||||
Mixed mode. Locks user temporarily for specified number of times and then locks user permanently.
|
||||
|
||||
.Lockout permanently after temporary lockout
|
||||
image:images/brute-force-mixed.png[]
|
||||
|
||||
*Permanent lockout after temporary lockouts Parameters*
|
||||
|
||||
|===
|
||||
|Name |Description |Default
|
||||
|
||||
|Max temporary Lockouts
|
||||
|Max Login Failures
|
||||
|The maximum number of login failures.
|
||||
|30 failures
|
||||
|
||||
|Maximum temporary Lockouts
|
||||
|The maximum number of temporary lockouts permitted before permanent lockout occurs.
|
||||
|0
|
||||
|1
|
||||
|
||||
|Strategy to increase wait time
|
||||
|Strategy to increase the time a user will be temporarily disabled when the user's login attempts exceed _Max Login Failures_
|
||||
|Multiple
|
||||
|
||||
|Wait Increment
|
||||
|The time added to the time a user is temporarily disabled when the user's login attempts exceed _Max Login Failures_.
|
||||
|1 minute
|
||||
|
||||
|Max Wait
|
||||
|The maximum time a user is temporarily disabled.
|
||||
|15 minutes
|
||||
|
||||
|Failure Reset Time
|
||||
|The time when the failure count resets. The timer runs from the last failed login. Make sure this number is always greater than `Max wait`; otherwise the effective
|
||||
wait time will never reach the value you have set to `Max wait`.
|
||||
|12 hours
|
||||
|
||||
|Quick Login Check Milliseconds
|
||||
|The minimum time between login attempts.
|
||||
|1000 milliseconds
|
||||
|
||||
|Minimum Quick Login Wait
|
||||
|The minimum time the user is disabled when login attempts are quicker than _Quick Login Check Milliseconds_.
|
||||
|1 minute
|
||||
|
||||
|===
|
||||
|
||||
*Permanent Lockout Flow*
|
||||
*Permanent lockout after temporary lockouts Algorithm*
|
||||
====
|
||||
. Follow temporary lockout flow
|
||||
. If temporary lockout counter exceeds Max temporary lockouts
|
||||
.. Permanently disable user
|
||||
. On successful login
|
||||
.. Reset `count`
|
||||
.. Reset `temporary lockout` counter
|
||||
. On failed login
|
||||
.. If the time between this failure and the last failure is greater than _Failure Reset Time_
|
||||
... Reset `count`
|
||||
... Reset `temporary lockout` counter
|
||||
.. Increment `count`
|
||||
.. Calculate `wait` according the brute force strategy defined (see below Strategies to set Wait Time).
|
||||
.. If `wait` is less than or equals to 0 and the time between this failure and the last failure is less than _Quick Login Check Milliseconds_
|
||||
... set `wait` to _Minimum Quick Login Wait_
|
||||
... set `quick login failure` to `true``
|
||||
.. if `wait` and `Maximum temporary Lockouts` is greater than 0
|
||||
... set `wait` to the smallest of `wait` and _Max Wait_ seconds
|
||||
.. if `quick login failure` is `false`
|
||||
... Increment `temporary lockout` counter
|
||||
.. If `temporary lockout` counter exceeds `Maximum temporary lockouts`
|
||||
... Permanently locks the user
|
||||
.. Else
|
||||
... Temporarily blocks the user according `wait` value
|
||||
|
||||
When {project_name} disables a user, the user cannot log in until an administrator enables the user. Enabling an account resets the `count`.
|
||||
====
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
`count` does not increment when a temporarily disabled account commits a login failure.
|
||||
====
|
||||
|
||||
==== Downside of {project_name} brute force detection
|
||||
|
||||
The downside of {project_name} brute force detection is that the server becomes vulnerable to denial of service attacks. When implementing a denial of service attack, an attacker can attempt to log in by guessing passwords for any accounts it knows and eventually causing {project_name} to disable the accounts.
|
||||
|
||||
Consider using intrusion prevention software (IPS). {project_name} logs every login failure and client IP address failure. You can point the IPS to the {project_name} server's log file, and the IPS can modify firewalls to block connections from these IP addresses.
|
||||
|
||||
==== Password policies
|
||||
|
||||
Ensure you have a complex password policy to force users to choose complex passwords. See the <<_password-policies, Password Policies>> chapter for more information. Prevent password guessing by setting up the {project_name} server to use one-time-passwords.
|
||||
|
|
|
@ -0,0 +1,4 @@
|
|||
|
||||
=== Password policies
|
||||
|
||||
Ensure you have a complex password policy to force users to choose complex passwords. See the <<_password-policies, Password Policies>> chapter for more information. Prevent password guessing by setting up the {project_name} server to use one-time-passwords.
|
|
@ -0,0 +1,74 @@
|
|||
[[_scim]]
|
||||
|
||||
=== SCIM client capabilities
|
||||
|
||||
{project_name} includes a http://www.simplecloud.info[SCIM2] client that allows you to:
|
||||
|
||||
* Declare SCIM endpoints (through the identity federation UI). Any tool implementing SCIM protocol can be wired to the
|
||||
{project_name} instance through this declaration.
|
||||
* Propagate users and groups from {project_name} to SCIM endpoints: when a user/group gets created or modified in {project_name},
|
||||
the modification is forwarded to all declared SCIM endpoints through SCIM calls within the transaction scope. If
|
||||
propagation fails, changes can be rolled back or not according to a configurable rollback strategy.
|
||||
* Synchronize users and groups from SCIM endpoints (through the {project_name} synchronization mechanism).
|
||||
|
||||
See https://datatracker.ietf.org/doc/html/rfc7643[RFC7643]
|
||||
and https://datatracker.ietf.org/doc/html/rfc7644[RFC7644] for further details
|
||||
|
||||
==== Enabling SCIM extension
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
This extension is currently in experimental mode, and requires the `SCIM` experimental profile to be enabled
|
||||
====
|
||||
|
||||
.Procedure
|
||||
. Click on *Admin Console > Realm Settings > Events* in the menu.
|
||||
. Add `scim` to the list of event listeners
|
||||
image:images/scim-event-listener-page.png[Enable SCIM Event listeners]
|
||||
. Save
|
||||
|
||||
==== Registering SCIM Service Providers
|
||||
|
||||
.Procedure
|
||||
. Click on *User federation > Add Scim Providers*
|
||||
image:images/scim-federation-provider-page.png[Configure SCIM service provider]
|
||||
. Fill required fields according to the SCIM endpoint you are wiring
|
||||
. If you enable import during sync then you can choose between the following import actions:
|
||||
|
||||
- Create Local - adds users to keycloak
|
||||
- Nothing
|
||||
- Delete Remote - deletes users from the remote application
|
||||
|
||||
==== Sync
|
||||
|
||||
You can set up a periodic sync for all users or just changed users - it's not mandatory. You can either do:
|
||||
|
||||
- Periodic Full Sync
|
||||
- Periodic Changed User Sync
|
||||
|
||||
|
||||
==== Technical notes
|
||||
|
||||
===== Motivation
|
||||
|
||||
We want to build a unified collaborative platform based on multiple applications. To do that, we need a way to propagate
|
||||
immediately changes made in Keycloak to all these applications. And we want to keep using OIDC or SAML as the
|
||||
authentication protocol.
|
||||
|
||||
This will allow users to collaborate seamlessly across the platform without requiring every user to have connected once
|
||||
to each application. This will also ease GDPR compliance because deleting a user in Keycloak will delete the user from
|
||||
every app. The SCIM protocol is standard, comprehensible and easy to implement. It's a perfect fit for our goal.
|
||||
|
||||
We chose to build application extensions/plugins because it's easier to deploy and thus will benefit to a larger portion
|
||||
of the FOSS community.
|
||||
|
||||
===== Keycloak specific
|
||||
|
||||
This extension uses 3 concepts in Keycloak:
|
||||
|
||||
- Event Listener : used to listen for changes within Keycloak (e.g. User creation, Group deletion...) and propagate
|
||||
them to registered SCIM service providers through SCIM requests.
|
||||
- Federation Provider : used to set up all the SCIM service providers endpoint without creating our own UI.
|
||||
- JPA Entity Provider : used to save the mapping between the local IDs and the service providers IDs.
|
||||
|
||||
It is based on https://github.com/Captain-P-Goldfish/SCIM-SDK[Scim SDK].
|
|
@ -22,7 +22,7 @@ These APIs are no longer needed as initialization is done automatically on deman
|
|||
= Virtual Threads enabled for Infinispan and JGroups thread pools
|
||||
|
||||
Starting from this release, {project_name} automatically enables the virtual thread pool support in both the embedded Infinispan and JGroups when running on OpenJDK 21.
|
||||
This removes the need to configure the thread pool and reduces overall memory footprint.
|
||||
This removes the need to configure the JGroups thread pool, the need to align the JGroups thread pool with the HTTP worker thread pool, and reduces the overall memory footprint.
|
||||
To disable the virtual threads, add one of the Java system properties combinations to your deployment:
|
||||
|
||||
* `-Dorg.infinispan.threads.virtual=false`: disables virtual thread in both Infinispan and JGroups.
|
||||
|
@ -41,6 +41,11 @@ To enable the previous behavior, choose the transport stack `udp`.
|
|||
|
||||
The {project_name} Operator will continue to configure `kubernetes` as a transport stack.
|
||||
|
||||
= Deprecated transport stacks for distributed caches
|
||||
|
||||
The `azure`, `ec2` and `google` transport stacks have been deprecated. Users should use the TCP based `jdbc-ping`
|
||||
stack as a direct replacement.
|
||||
|
||||
= Defining dependencies between provider factories
|
||||
|
||||
When developing extensions for {project_name}, developers can now specify dependencies between provider factories classes by implementing the method `dependsOn()` in the `ProviderFactory` interface.
|
||||
|
@ -49,3 +54,15 @@ See the Javadoc for a detailed description.
|
|||
= Removal of robots.txt file
|
||||
|
||||
The `robots.txt` file, previously included by default, is now removed. The default `robots.txt` file blocked all crawling, which prevented the `noindex`/`nofollow` directives from being followed. The desired default behaviour is for {project_name} pages to not show up in search engine results and this is accomplished by the existing `X-Robots-Tag` header, which is set to `none` by default. The value of this header can be overridden per-realm if a different behaviour is needed.
|
||||
|
||||
= Offline access removes the associated online session if the `offline_access` scope is requested in the initial exchange
|
||||
|
||||
Any offline session in {project_name} is created from another online session. When the `offline_access` scope is requested, the current online session is used to create the associated offline session for the client. Therefore any `offline_access` request finished, until now, with two sessions, one online and one offline.
|
||||
|
||||
Starting with this version, {project_name} removes the initial online session if the `offline_access` scope is directly requested as the first interaction for the session. The client retrieves the offline token after the code to token exchange that is associated to the offline session, but the previous online session is removed. If the online session has been used before the `offline_access` request, by the same or another client, the online session remains active as today. Although the new behavior makes sense because the client application is just asking for an offline token, it can affect some scenarios that rely on having the online session still active after the initial `offline_access` token request.
|
||||
|
||||
= New client scope `service_account` for `client_credentials` grant mappers
|
||||
|
||||
{project_name} introduces a new client scope at the realm level called `service_account` which is in charge of adding the specific claims for `client_credentials` grant (`client_id`, `clientHost` and `clientAddress`) via protocol mappers. This scope will be automatically assigned to and unassigned from the client when the `serviceAccountsEnabled` option is set or unset in the client configuration.
|
||||
|
||||
Previously, the three mappers (`Client Id`, `Client Host` and `Client IP Address`) were added directly to the dedicated scope when the client was configured to enable service accounts, and they were never removed.
|
|
@ -12,6 +12,15 @@ For a configuration where this is applied, visit <@links.ha id="deploy-keycloak-
|
|||
|
||||
== Concepts
|
||||
|
||||
=== JGroups communications
|
||||
|
||||
// remove this paragraph once OpenJDK 17 is no longer supported on the server side.
|
||||
// https://github.com/keycloak/keycloak/issues/31101
|
||||
|
||||
JGroups communications, which is used in single-site setups for the communication between {project_name} nodes, benefits from the use of virtual threads which are available in OpenJDK 21.
|
||||
This reduces the memory usage and removes the need to configure thread pool sizes.
|
||||
Therefore, the use of OpenJDK 21 is recommended.
|
||||
|
||||
=== Quarkus executor pool
|
||||
|
||||
{project_name} requests, as well as blocking probes, are handled by an executor pool. Depending on the available CPU cores, it has a maximum size of 50 or more threads.
|
||||
|
@ -31,32 +40,6 @@ If you increase the number of database connections and the number of threads too
|
|||
The number of database connections is configured via the link:{links_server_all-config_url}?q=db-pool[`Database` settings `db-pool-initial-size`, `db-pool-min-size` and `db-pool-max-size`] respectively.
|
||||
Low numbers ensure fast response times for all clients, even if there is an occasionally failing request when there is a load spike.
|
||||
|
||||
=== JGroups connection pool
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
* This currently applies to single-site setups only.
|
||||
In a multi-site setup with an external {jdgserver_name} this is not a restriction.
|
||||
* This currently applies if virtual threads are disabled.
|
||||
Since {project_name} 26.1, virtual threads are enabled in both embedded Infinispan and JGroups if running on OpenJDK 21 or higher.
|
||||
====
|
||||
|
||||
|
||||
The combined number of executor threads in all {project_name} nodes in the cluster should not greatly exceed the number of threads available in the JGroups thread pool to avoid the warning `thread pool is full (max=<value>, active=<value>)`.
|
||||
|
||||
The warning includes a thread dump when the Java system property `-Djgroups.thread_dumps_enabled=true` is set.
|
||||
It may incur a performance penalty when collecting those thread dumps.
|
||||
|
||||
--
|
||||
include::partials/threads/executor-jgroups-thread-calculation.adoc[]
|
||||
--
|
||||
|
||||
Use metrics to monitor the total JGroups threads in the pool and the threads active in the pool.
|
||||
When using TCP as the JGroups transport protocol, the metrics `vendor_jgroups_tcp_get_thread_pool_size` and `vendor_jgroups_tcp_get_thread_pool_size_active` are available for monitoring. When using UDP, the metrics `vendor_jgroups_udp_get_thread_pool_size` and `vendor_jgroups_udp_get_thread_pool_size_active` are available.
|
||||
This is useful to monitor that limiting the Quarkus thread pool size keeps the number of active JGroups threads below the maximum JGroups thread pool size.
|
||||
|
||||
WARNING: The metrics are not available when virtual threads are enabled in JGroups.
|
||||
|
||||
[#load-shedding]
|
||||
=== Load Shedding
|
||||
|
||||
|
|
|
@ -17,6 +17,7 @@ Use it together with the other building blocks outlined in the <@links.ha id="bb
|
|||
* Understanding of a <@links.operator id="basic-deployment" /> of {project_name} with the {project_name} Operator.
|
||||
* Aurora AWS database deployed using the <@links.ha id="deploy-aurora-multi-az" /> {section}.
|
||||
* {jdgserver_name} server deployed using the <@links.ha id="deploy-infinispan-kubernetes-crossdc" /> {section}.
|
||||
* Running {project_name} with OpenJDK 21, which is the default for the containers distributed for {project_name}, as this enables virtual threads for the JGroups communication.
|
||||
|
||||
== Procedure
|
||||
|
||||
|
@ -46,8 +47,6 @@ See the <@links.ha id="concepts-database-connections" /> {section} for details.
|
|||
<5> To be able to analyze the system under load, enable the metrics endpoint.
|
||||
The disadvantage of the setting is that the metrics will be available at the external {project_name} endpoint, so you must add a filter so that the endpoint is not available from the outside.
|
||||
Use a reverse proxy in front of {project_name} to filter out those URLs.
|
||||
<6> You might consider limiting the number of {project_name} threads further because multiple concurrent threads will lead to throttling by Kubernetes once the requested CPU limit is reached.
|
||||
See the <@links.ha id="concepts-threads" /> {section} for details.
|
||||
|
||||
== Verifying the deployment
|
||||
|
||||
|
@ -70,7 +69,11 @@ spec:
|
|||
additionalOptions:
|
||||
include::examples/generated/keycloak.yaml[tag=keycloak-queue-size]
|
||||
----
|
||||
|
||||
All exceeding requests are served with an HTTP 503.
|
||||
|
||||
You might consider limiting the value for `http-pool-max-threads` further because multiple concurrent threads will lead to throttling by Kubernetes once the requested CPU limit is reached.
|
||||
|
||||
See the <@links.ha id="concepts-threads" /> {section} about load shedding for details.
|
||||
|
||||
== Optional: Disable sticky sessions
|
||||
|
|
|
@ -464,16 +464,16 @@ spec:
|
|||
- name: http-metrics-slos
|
||||
value: '5,10,25,50,250,500'
|
||||
# tag::keycloak[]
|
||||
# end::keycloak[]
|
||||
# tag::keycloak-queue-size[]
|
||||
- name: http-max-queued-requests
|
||||
value: "1000"
|
||||
# end::keycloak-queue-size[]
|
||||
# tag::keycloak[]
|
||||
- name: log-console-output
|
||||
value: json
|
||||
- name: metrics-enabled # <5>
|
||||
value: 'true'
|
||||
- name: http-pool-max-threads # <6>
|
||||
value: "200"
|
||||
# tag::keycloak-ispn[]
|
||||
- name: cache-remote-host # <1>
|
||||
value: "infinispan.keycloak.svc"
|
||||
|
|
|
@ -453,16 +453,16 @@ spec:
|
|||
- name: http-metrics-slos
|
||||
value: '5,10,25,50,250,500'
|
||||
# tag::keycloak[]
|
||||
# end::keycloak[]
|
||||
# tag::keycloak-queue-size[]
|
||||
- name: http-max-queued-requests
|
||||
value: "1000"
|
||||
# end::keycloak-queue-size[]
|
||||
# tag::keycloak[]
|
||||
- name: log-console-output
|
||||
value: json
|
||||
- name: metrics-enabled # <5>
|
||||
value: 'true'
|
||||
- name: http-pool-max-threads # <6>
|
||||
value: "66"
|
||||
# end::keycloak[]
|
||||
# This block is just for documentation purposes as we need both versions of Infinispan config, with and without numbers to corresponding options
|
||||
# tag::keycloak[]
|
||||
|
@ -510,6 +510,7 @@ spec:
|
|||
- name: JAVA_OPTS_APPEND # <5>
|
||||
value: ""
|
||||
ports:
|
||||
# end::keycloak[]
|
||||
# readinessProbe:
|
||||
# exec:
|
||||
# command:
|
||||
|
|
|
@ -1,5 +0,0 @@
|
|||
The number of JGroup threads is `200` by default.
|
||||
While it can be configured using the Java system property `jgroups.thread_pool.max_threads`, we advise keeping it at this value.
|
||||
As shown in experiments, the total number of Quarkus worker threads in the cluster should not exceed the number of threads in the JGroup thread pool of `200` in each node to avoid requests being dropped in the JGroups communication.
|
||||
Given a {project_name} cluster with four nodes, each node should then have around 50 Quarkus worker threads.
|
||||
Use the {project_name} configuration option `http-pool-max-threads` to configure the maximum number of Quarkus worker threads.
|
|
@ -12,15 +12,16 @@ An admin can do this through the admin console (or admin REST endpoints), but cl
|
|||
The Client Registration Service provides built-in support for {project_name} Client Representations, OpenID Connect Client Meta Data and SAML Entity Descriptors.
|
||||
The Client Registration Service endpoint is `/realms/<realm>/clients-registrations/<provider>`.
|
||||
|
||||
The built-in supported `providers` are:
|
||||
The built-in supported `providers` are:
|
||||
|
||||
* default - {project_name} Client Representation (JSON)
|
||||
* install - {project_name} Adapter Configuration (JSON)
|
||||
* openid-connect - OpenID Connect Client Metadata Description (JSON)
|
||||
* saml2-entity-descriptor - SAML Entity Descriptor (XML)
|
||||
|
||||
The following sections will describe how to use the different providers.
|
||||
The following sections will describe how to use the different providers.
|
||||
|
||||
[#_authentication]
|
||||
== Authentication
|
||||
|
||||
To invoke the Client Registration Services you usually need a token. The token can be a bearer token, an initial access token or a registration access token.
|
||||
|
@ -40,7 +41,7 @@ If you are using a bearer token to create clients it's recommend to use a token
|
|||
=== Initial Access Token
|
||||
|
||||
The recommended approach to registering new clients is by using initial access tokens.
|
||||
An initial access token can only be used to create clients and has a configurable expiration as well as a configurable limit on how many clients can be created.
|
||||
An initial access token can only be used to create clients and has a configurable expiration as well as a configurable limit on how many clients can be created.
|
||||
|
||||
An initial access token can be created through the admin console.
|
||||
To create a new initial access token first select the realm in the admin console, then click on `Client` in the menu on the left, followed by
|
||||
|
@ -53,12 +54,12 @@ many clients can be created using the token. After you click on `Save` the token
|
|||
It is important that you copy/paste this token now as you won't be able to retrieve it later. If you forget to copy/paste it, then delete the token and create another one.
|
||||
|
||||
The token value is used as a standard bearer token when invoking the Client Registration Services, by adding it to the Authorization header in the request.
|
||||
For example:
|
||||
For example:
|
||||
|
||||
[source]
|
||||
----
|
||||
Authorization: bearer eyJhbGciOiJSUz...
|
||||
----
|
||||
----
|
||||
|
||||
[[_registration_access_token]]
|
||||
=== Registration Access Token
|
||||
|
@ -82,16 +83,16 @@ console, including for example configuring protocol mappers.
|
|||
To create a client create a Client Representation (JSON) then perform an HTTP POST request to `/realms/<realm>/clients-registrations/default`.
|
||||
|
||||
It will return a Client Representation that also includes the registration access token.
|
||||
You should save the registration access token somewhere if you want to retrieve the config, update or delete the client later.
|
||||
You should save the registration access token somewhere if you want to retrieve the config, update or delete the client later.
|
||||
|
||||
To retrieve the Client Representation perform an HTTP GET request to `/realms/<realm>/clients-registrations/default/<client id>`.
|
||||
|
||||
It will also return a new registration access token.
|
||||
It will also return a new registration access token.
|
||||
|
||||
To update the Client Representation perform an HTTP PUT request with the updated Client Representation to:
|
||||
`/realms/<realm>/clients-registrations/default/<client id>`.
|
||||
|
||||
It will also return a new registration access token.
|
||||
It will also return a new registration access token.
|
||||
|
||||
To delete the Client Representation perform an HTTP DELETE request to:
|
||||
`/realms/<realm>/clients-registrations/default/<client id>`
|
||||
|
@ -100,12 +101,12 @@ To delete the Client Representation perform an HTTP DELETE request to:
|
|||
|
||||
The `installation` client registration provider can be used to retrieve the adapter configuration for a client.
|
||||
In addition to token authentication you can also authenticate with client credentials using HTTP basic authentication.
|
||||
To do this include the following header in the request:
|
||||
To do this include the following header in the request:
|
||||
|
||||
[source]
|
||||
----
|
||||
Authorization: basic BASE64(client-id + ':' + client-secret)
|
||||
----
|
||||
----
|
||||
|
||||
To retrieve the Adapter Configuration then perform an HTTP GET request to `/realms/<realm>/clients-registrations/install/<client id>`.
|
||||
|
||||
|
@ -146,7 +147,7 @@ curl -X POST \
|
|||
== Example using Java Client Registration API
|
||||
|
||||
The Client Registration Java API makes it easy to use the Client Registration Service using Java.
|
||||
To use include the dependency `org.keycloak:keycloak-client-registration-api:>VERSION<` from Maven.
|
||||
To use include the dependency `org.keycloak:keycloak-client-registration-api:>VERSION<` from Maven.
|
||||
|
||||
For full instructions on using the Client Registration refer to the JavaDocs.
|
||||
Below is an example of creating a client. You need to replace `eyJhbGciOiJSUz...` with a proper initial access token or bearer token.
|
||||
|
|
|
@ -7,6 +7,7 @@ priority=30
|
|||
summary="Client-side JavaScript library that can be used to secure web applications.">
|
||||
|
||||
{project_name} comes with a client-side JavaScript library called `keycloak-js` that can be used to secure web applications. The adapter also comes with built-in support for Cordova applications.
|
||||
The adapter uses OpenID Connect protocol under the covers. You can take a look at the <@links.securingapps id="oidc-layers" anchor="_oidc_available_endpoints"/> {section} for the more generic information about OpenID Connect endpoints and capabilities.
|
||||
|
||||
== Installation
|
||||
|
||||
|
|
|
@ -7,6 +7,7 @@ priority=40
|
|||
summary="Node.js adapter to protect server-side JavaScript apps">
|
||||
|
||||
{project_name} provides a Node.js adapter built on top of https://github.com/senchalabs/connect[Connect] to protect server-side JavaScript apps - the goal was to be flexible enough to integrate with frameworks like https://expressjs.com/[Express.js].
|
||||
The adapter uses OpenID Connect protocol under the covers. You can take a look at the <@links.securingapps id="oidc-layers" anchor="_oidc_available_endpoints"/> {section} for the more generic information about OpenID Connect endpoints and capabilities.
|
||||
|
||||
ifeval::[{project_community}==true]
|
||||
The library can be downloaded directly from https://www.npmjs.com/package/keycloak-connect[ {project_name} organization] and the source is available at
|
||||
|
|
|
@ -6,7 +6,7 @@ title="Secure applications and services with OpenID Connect"
|
|||
priority=20
|
||||
summary="Using OpenID Connect with Keycloak to secure applications and services">
|
||||
|
||||
<#include "partials/oidc/available-endpoints.adoc" />
|
||||
include::partials/oidc/available-endpoints.adoc[]
|
||||
|
||||
include::partials/oidc/supported-grant-types.adoc[]
|
||||
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
|
||||
[#_oidc_available_endpoints]
|
||||
== Available Endpoints
|
||||
|
||||
As a fully-compliant OpenID Connect Provider implementation, {project_name} exposes a set of endpoints that applications
|
||||
|
|
|
@ -14,7 +14,8 @@ The current distributed cache implementation is built on top of https://infinisp
|
|||
== Enable distributed caching
|
||||
When you start {project_name} in production mode, by using the `start` command, caching is enabled and all {project_name} nodes in your network are discovered.
|
||||
|
||||
By default, caches are using a UDP transport stack so that nodes are discovered using IP multicast transport based on UDP. For most production environments, there are better discovery alternatives to UDP available. {project_name} allows you to either choose from a set of pre-defined default transport stacks, or to define your own custom stack, as you will see later in this {section}.
|
||||
By default, caches use the `jdbc-ping-udp` stack which is based upon a UDP transport and uses the configured database to track nodes joining the cluster.
|
||||
{project_name} allows you to either choose from a set of pre-defined default transport stacks, or to define your own custom stack, as you will see later in this {section}.
|
||||
|
||||
To explicitly enable distributed infinispan caching, enter this command:
|
||||
|
||||
|
@ -246,6 +247,10 @@ The following table shows transport stacks that are available using the `--cache
|
|||
|===
|
||||
|
||||
=== Additional transport stacks
|
||||
|
||||
IMPORTANT: The following stacks are deprecated. We recommend that you utilise the `jdbc-ping` stack in such environments
|
||||
as it does not require additional configuration or dependencies.
|
||||
|
||||
The following table shows transport stacks that are supported by {project_name}, but need some extra steps to work.
|
||||
Note that _none_ of these stacks are Kubernetes / OpenShift stacks, so there is no need to enable the `google` stack if you want to run {project_name} on top of the Google Kubernetes Engine.
|
||||
In that case, use the `kubernetes` stack.
|
||||
|
|
|
@ -76,6 +76,9 @@ The table below summarizes the available metrics groups:
|
|||
|Cache
|
||||
|A set of metrics from Infinispan caches. See <@links.server id="caching"/> for more details.
|
||||
|
||||
|Keycloak
|
||||
|A set of metrics from Keycloak events. See <@links.server id="event-metrics"/> for more details.
|
||||
|
||||
|===
|
||||
|
||||
</@tmpl.guide>
|
||||
|
|
60
docs/guides/server/event-metrics.adoc
Normal file
|
@ -0,0 +1,60 @@
|
|||
<#import "/templates/guide.adoc" as tmpl>
|
||||
<#import "/templates/kc.adoc" as kc>
|
||||
<#import "/templates/options.adoc" as opts>
|
||||
<#import "/templates/links.adoc" as links>
|
||||
|
||||
<@tmpl.guide
|
||||
title="Enabling {project_name} Event Metrics"
|
||||
summary="Learn how to enable and use {project_name} Event Metrics"
|
||||
preview="true"
|
||||
includedOptions="metrics-enabled event-metrics-user-*">
|
||||
|
||||
Event metrics can provide admins an overview of the different activities in a {project_name} instance.
|
||||
For now, only metrics for user events are captured.
|
||||
For example, you can monitor the number of logins, login failures, or token refreshes performed.
|
||||
|
||||
The metrics are exposed using the standard metrics endpoint, and you can use it in your own metrics collection system to create dashboards and alerts.
|
||||
|
||||
The metrics are reported as counters per {project_name} instance.
|
||||
The counters are reset on the restart of the instance.
|
||||
If you have multiple instances running in a cluster, you will need to collect the metrics from all instances and aggregate them to get a per-cluster view.
|
||||
|
||||
== Enable event metrics
|
||||
|
||||
To start collecting metrics, enable the feature `user-event-metrics`, enable metrics, and enable the metrics for user events.
|
||||
|
||||
The following shows the required startup parameters:
|
||||
|
||||
<@kc.start parameters="--features=user-event-metrics --metrics-enabled=true --event-metrics-user-enabled=true ..."/>
|
||||
|
||||
By default, there is a separate metric for each realm.
|
||||
To break down the metric by client and identity provider, you can add those metric dimensions using the configuration option `event-metrics-user-tags`.
|
||||
This can be useful on installations with a small number of clients and IDPs.
|
||||
This is not recommended for installations with a large number of clients or IDPs as it will increase the memory usage of {project_name} and the load on your monitoring system.
|
||||
|
||||
The following shows how to configure {project_name} to break down the metrics by all three metrics dimensions:
|
||||
|
||||
<@kc.start parameters="... --event-metrics-user-tags=realm,idp,clientId ..."/>
|
||||
|
||||
You can limit the events for which {project_name} will expose metrics.
|
||||
|
||||
The following example limits the events collected to `LOGIN` and `LOGOUT` events:
|
||||
|
||||
<@kc.start parameters="... --event-metrics-user-events=login,logout ..."/>
|
||||
|
||||
All error events will be collected with the primary event type and will have the `error` tag filled with the error code.
|
||||
|
||||
The snippet below is an example of a response provided by the metric endpoint:
|
||||
|
||||
[source]
|
||||
----
|
||||
# HELP keycloak_user_events_total Keycloak user events
|
||||
# TYPE keycloak_user_events_total counter
|
||||
keycloak_user_events_total{client_id="security-admin-console",error="",event="code_to_token",idp="",realm="master",} 1.0
|
||||
keycloak_user_events_total{client_id="security-admin-console",error="",event="login",idp="",realm="master",} 1.0
|
||||
keycloak_user_events_total{client_id="security-admin-console",error="",event="logout",idp="",realm="master",} 1.0
|
||||
keycloak_user_events_total{client_id="security-admin-console",error="invalid_user_credentials",event="login",idp="",realm="master",} 1.0
|
||||
----
|
||||
|
||||
|
||||
</@tmpl.guide>
|
|
@ -16,7 +16,7 @@ The default count of users per file and per transaction is fifty.
|
|||
Increasing this to a larger number leads to an exponentially increasing execution time.
|
||||
====
|
||||
|
||||
All {project_name} nodes need to be stopped prior to using `kc.[sh|bat] import | export` commands. This ensures that the resulting operations will have no consistency issues with concurrent requests.
|
||||
All {project_name} nodes need to be stopped prior to using `kc.[sh|bat] import | export` commands. This ensures that the resulting operations will have no consistency issues with concurrent requests.
|
||||
It also ensures that running an import or export command from the same machine as a server instance will not result in port or other conflicts.
|
||||
|
||||
== Providing options for database connection parameters
|
||||
|
@ -31,7 +31,7 @@ As default, {project_name} will re-build automatically for the `export` and `imp
|
|||
If you have built an optimized version of {project_name} with the `build` command as outlined in <@links.server id="configuration"/>, use the command line option `--optimized` to have {project_name} skip the build check for a faster startup time.
|
||||
When doing this, remove the build time options from the command line and keep only the runtime options.
|
||||
|
||||
NOTE: if you do not use `--optimized` keep in mind that an `import` or `export` command will implicitly create or update an optimized image for you - if you are running the command from the same machine as a server instance, this may impact the next start of your server.
|
||||
NOTE: if you do not use `--optimized` keep in mind that an `import` or `export` command will implicitly create or update an optimized image for you - if you are running the command from the same machine as a server instance, this may impact the next start of your server.
|
||||
|
||||
== Exporting a Realm to a Directory
|
||||
|
||||
|
@ -130,8 +130,6 @@ realms and potentially lose state between server restarts.
|
|||
|
||||
To re-create realms you should explicitly run the `import` command prior to starting the server.
|
||||
|
||||
Importing the `master` realm is not supported because it is a very sensitive operation.
|
||||
|
||||
== Importing and Exporting by using the Admin Console
|
||||
|
||||
You can also import and export a realm using the Admin Console. This functionality is
|
||||
|
@ -148,7 +146,7 @@ To export a realm using the Admin Console, perform these steps:
|
|||
. Click *Realm settings* in the menu.
|
||||
. Point to the *Action* menu in the top right corner of the realm settings screen, and select *Partial export*.
|
||||
+
|
||||
A list of resources appears along with the realm configuration.
|
||||
A list of resources appears along with the realm configuration.
|
||||
. Select the resources you want to export.
|
||||
. Click *Export*.
|
||||
|
||||
|
@ -162,7 +160,7 @@ In a similar way, you can import a previously exported realm. Perform these step
|
|||
|
||||
. Click *Realm settings* in the menu.
|
||||
. Point to the *Action* menu in the top right corner of the realm settings screen, and select *Partial import*.
|
||||
+
|
||||
+
|
||||
A prompt appears where you can select the file you want to import. Based on this file, you see the resources you can import along with the realm settings.
|
||||
. Click *Import*.
|
||||
|
||||
|
|
|
@ -18,6 +18,7 @@ fips
|
|||
management-interface
|
||||
health
|
||||
configuration-metrics
|
||||
event-metrics
|
||||
tracing
|
||||
importExport
|
||||
vault
|
||||
|
|
|
@ -42,7 +42,6 @@ import org.keycloak.models.UserModel;
|
|||
import org.keycloak.models.utils.reflection.Property;
|
||||
import org.keycloak.models.utils.reflection.PropertyCriteria;
|
||||
import org.keycloak.models.utils.reflection.PropertyQueries;
|
||||
import org.keycloak.storage.ldap.LDAPConfig;
|
||||
import org.keycloak.storage.ldap.idm.model.LDAPDn;
|
||||
import org.keycloak.storage.ldap.idm.model.LDAPObject;
|
||||
import org.keycloak.storage.ldap.idm.query.Condition;
|
||||
|
@ -373,7 +372,7 @@ public class LDAPUtils {
|
|||
* Map key are the attributes names in lower case
|
||||
*/
|
||||
public static Map<String, Property<Object>> getUserModelProperties(){
|
||||
|
||||
|
||||
Map<String, Property<Object>> userModelProps = PropertyQueries.createQuery(UserModel.class)
|
||||
.addCriteria(new PropertyCriteria() {
|
||||
|
||||
|
|
|
@ -569,6 +569,11 @@ public class GroupLDAPStorageMapper extends AbstractLDAPStorageMapper implements
|
|||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
if (!isGroupInGroupPath(realm, kcGroup)) {
|
||||
// group being inspected is not managed by this mapper - return empty collection
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
// TODO: with ranged search in AD we can improve the search using the specific range (not done for the moment)
|
||||
LDAPObject ldapGroup = loadLDAPGroupByName(kcGroup.getName());
|
||||
if (ldapGroup == null) {
|
||||
|
@ -703,18 +708,18 @@ public class GroupLDAPStorageMapper extends AbstractLDAPStorageMapper implements
|
|||
@Override
|
||||
public Stream<GroupModel> getGroupsStream() {
|
||||
Stream<GroupModel> ldapGroupMappings = getLDAPGroupMappingsConverted();
|
||||
if (config.getMode() == LDAPGroupMapperMode.LDAP_ONLY) {
|
||||
if (config.isTopLevelGroupsPath() && config.getMode() == LDAPGroupMapperMode.LDAP_ONLY) {
|
||||
// Use just group mappings from LDAP
|
||||
return ldapGroupMappings;
|
||||
} else {
|
||||
// Merge mappings from both DB and LDAP
|
||||
// Merge mappings from both DB and LDAP (including groups assigned from other group mappers)
|
||||
return Stream.concat(ldapGroupMappings, super.getGroupsStream());
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void joinGroup(GroupModel group) {
|
||||
if (config.getMode() == LDAPGroupMapperMode.LDAP_ONLY) {
|
||||
if (config.getMode() == LDAPGroupMapperMode.LDAP_ONLY && isGroupInGroupPath(realm, group)) {
|
||||
// We need to create new role mappings in LDAP
|
||||
cachedLDAPGroupMappings = null;
|
||||
addGroupMappingInLDAP(realm, group, ldapUser);
|
||||
|
@ -725,6 +730,11 @@ public class GroupLDAPStorageMapper extends AbstractLDAPStorageMapper implements
|
|||
|
||||
@Override
|
||||
public void leaveGroup(GroupModel group) {
|
||||
// if user is leaving group not managed by this mapper, let the call proceed to the next mapper or to the DB.
|
||||
if (!isGroupInGroupPath(realm, group)) {
|
||||
super.leaveGroup(group);
|
||||
}
|
||||
|
||||
try (LDAPQuery ldapQuery = createGroupQuery(true)) {
|
||||
LDAPQueryConditionsBuilder conditionsBuilder = new LDAPQueryConditionsBuilder();
|
||||
Condition roleNameCondition = conditionsBuilder.equal(config.getGroupNameLdapAttribute(), group.getName());
|
||||
|
@ -756,7 +766,7 @@ public class GroupLDAPStorageMapper extends AbstractLDAPStorageMapper implements
|
|||
|
||||
@Override
|
||||
public boolean isMemberOf(GroupModel group) {
|
||||
return RoleUtils.isDirectMember(getGroupsStream(),group);
|
||||
return isGroupInGroupPath(realm, group) && RoleUtils.isDirectMember(getGroupsStream(),group);
|
||||
}
|
||||
|
||||
protected Stream<GroupModel> getLDAPGroupMappingsConverted() {
|
||||
|
@ -795,6 +805,23 @@ public class GroupLDAPStorageMapper extends AbstractLDAPStorageMapper implements
|
|||
return config.isTopLevelGroupsPath() ? null : KeycloakModelUtils.findGroupByPath(session, realm, config.getGroupsPath());
|
||||
}
|
||||
|
||||
protected boolean isGroupInGroupPath(RealmModel realm, GroupModel group) {
|
||||
if (config.isTopLevelGroupsPath()) {
|
||||
return true; // any group is in the path of the top level path.
|
||||
}
|
||||
GroupModel groupPathGroup = KeycloakModelUtils.findGroupByPath(session, realm, config.getGroupsPath());
|
||||
if (groupPathGroup != null) {
|
||||
while(!groupPathGroup.getId().equals(group.getId())) {
|
||||
group = group.getParent();
|
||||
if (group == null) {
|
||||
return false; // we checked every ancestor group, and none matches the group path group.
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new KC group from given LDAP group name in given KC parent group or the groups path.
|
||||
*/
|
||||
|
|
|
@ -248,7 +248,7 @@ public class MSADUserAccountControlStorageMapper extends AbstractLDAPStorageMapp
|
|||
|
||||
@Override
|
||||
public void setEnabled(boolean enabled) {
|
||||
if (ldapProvider.getEditMode() == UserStorageProvider.EditMode.WRITABLE && getPwdLastSet() > 0) {
|
||||
if (UserStorageProvider.EditMode.WRITABLE.equals(ldapProvider.getEditMode())) {
|
||||
MSADUserAccountControlStorageMapper.logger.debugf("Going to propagate enabled=%s for ldapUser '%s' to MSAD", enabled, ldapUser.getDn().toString());
|
||||
|
||||
UserAccountControl control = getUserAccountControl(ldapUser);
|
||||
|
|
|
@ -36,6 +36,7 @@
|
|||
<module>kerberos</module>
|
||||
<module>ldap</module>
|
||||
<module>sssd</module>
|
||||
<module>scim</module>
|
||||
</modules>
|
||||
|
||||
</project>
|
||||
|
|
83
federation/scim/pom.xml
Normal file
|
@ -0,0 +1,83 @@
|
|||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
|
||||
<parent>
|
||||
<artifactId>keycloak-parent</artifactId>
|
||||
<groupId>org.keycloak</groupId>
|
||||
<version>999.0.0-SNAPSHOT</version>
|
||||
<relativePath>../../pom.xml</relativePath>
|
||||
</parent>
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
<artifactId>keycloak-scim-federation</artifactId>
|
||||
<name>Keycloak Federation from SCIM endpoints</name>
|
||||
<description>
|
||||
This extension adds SCIM2 client capabilities to Keycloak using [Scim SDK](https://github.com/Captain-P-Goldfish/SCIM-SDK).
|
||||
It allows to :
|
||||
* Declare SCIM endpoints (through the identity federation UI). Any tool implementing SCIM protocol can be wired to the
|
||||
Keycloak instance through this declaration.
|
||||
* Propagate users and groups from Keycloak to SCIM endpoints : when a user/group gets created or modified in Keycloak,
|
||||
the modification is forwarded to all declared SCIM endpoints through SCIM calls within the transaction scope. If
|
||||
propagation fails, changes can be rolled back or not according to a configurable rollback strategy.
|
||||
* Import users and groups from SCIM endpoints (through the Keycloak synchronization mechanism).
|
||||
See [RFC7643](https://datatracker.ietf.org/doc/html/rfc7643)
|
||||
and [RFC7644](https://datatracker.ietf.org/doc/html/rfc7644)) for further details
|
||||
</description>
|
||||
|
||||
<properties>
|
||||
<scim-sdk-version>1.26.0</scim-sdk-version>
|
||||
<r4j-version>2.2.0</r4j-version>
|
||||
<maven-wildfly-plugin.version>2.0.2.Final</maven-wildfly-plugin.version>
|
||||
</properties>
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>org.keycloak</groupId>
|
||||
<artifactId>keycloak-core</artifactId>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.keycloak</groupId>
|
||||
<artifactId>keycloak-server-spi</artifactId>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.keycloak</groupId>
|
||||
<artifactId>keycloak-server-spi-private</artifactId>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.keycloak</groupId>
|
||||
<artifactId>keycloak-services</artifactId>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.keycloak</groupId>
|
||||
<artifactId>keycloak-model-jpa</artifactId>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.jboss.logging</groupId>
|
||||
<artifactId>jboss-logging</artifactId>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.github.resilience4j</groupId>
|
||||
<artifactId>resilience4j-retry</artifactId>
|
||||
<version>${r4j-version}</version>
|
||||
<scope>compile</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>de.captaingoldfish</groupId>
|
||||
<artifactId>scim-sdk-common</artifactId>
|
||||
<version>${scim-sdk-version}</version>
|
||||
<scope>compile</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>de.captaingoldfish</groupId>
|
||||
<artifactId>scim-sdk-client</artifactId>
|
||||
<version>${scim-sdk-version}</version>
|
||||
<scope>compile</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
</project>
|
|
@ -0,0 +1,179 @@
|
|||
package org.keycloak.federation.scim.core;
|
||||
|
||||
import org.jboss.logging.Logger;
|
||||
import org.keycloak.component.ComponentModel;
|
||||
import org.keycloak.models.KeycloakSession;
|
||||
import org.keycloak.federation.scim.core.exceptions.ScimExceptionHandler;
|
||||
import org.keycloak.federation.scim.core.exceptions.ScimPropagationException;
|
||||
import org.keycloak.federation.scim.core.exceptions.SkipOrStopApproach;
|
||||
import org.keycloak.federation.scim.core.exceptions.SkipOrStopStrategy;
|
||||
import org.keycloak.federation.scim.core.service.AbstractScimService;
|
||||
import org.keycloak.federation.scim.core.service.GroupScimService;
|
||||
import org.keycloak.federation.scim.core.service.UserScimService;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.LinkedHashSet;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
|
||||
/**
|
||||
* In charge of sending SCIM Request to all registered Scim endpoints.
|
||||
*/
|
||||
public class ScimDispatcher {
|
||||
|
||||
private static final Logger LOGGER = Logger.getLogger(ScimDispatcher.class);
|
||||
|
||||
private final KeycloakSession session;
|
||||
private final ScimExceptionHandler exceptionHandler;
|
||||
private final SkipOrStopStrategy skipOrStopStrategy;
|
||||
private final List<UserScimService> userScimServices = new ArrayList<>();
|
||||
private final List<GroupScimService> groupScimServices = new ArrayList<>();
|
||||
private boolean clientsInitialized = false;
|
||||
|
||||
public ScimDispatcher(KeycloakSession session) {
|
||||
this.session = session;
|
||||
this.exceptionHandler = new ScimExceptionHandler(session);
|
||||
// By default, use a permissive Skip or Stop strategy
|
||||
this.skipOrStopStrategy = SkipOrStopApproach.ALWAYS_SKIP_AND_CONTINUE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Lists all active ScimStorageProviderFactory and create new ScimClients for each of them
|
||||
*/
|
||||
public void refreshActiveScimEndpoints() {
|
||||
// Step 1: close existing clients (as configuration may have changed)
|
||||
groupScimServices.forEach(GroupScimService::close);
|
||||
groupScimServices.clear();
|
||||
userScimServices.forEach(UserScimService::close);
|
||||
userScimServices.clear();
|
||||
|
||||
// Step 2: Get All SCIM endpoints defined in Admin Console (enabled ScimStorageProviderFactory)
|
||||
session.getContext().getRealm().getComponentsStream().filter(
|
||||
m -> ScimEndpointConfigurationStorageProviderFactory.ID.equals(m.getProviderId()) && m.get("enabled", true))
|
||||
.forEach(scimEndpointConfigurationRaw -> {
|
||||
try {
|
||||
ScrimEndPointConfiguration scrimEndPointConfiguration = new ScrimEndPointConfiguration(
|
||||
scimEndpointConfigurationRaw);
|
||||
|
||||
// Step 3 : create scim clients for each endpoint
|
||||
if (scimEndpointConfigurationRaw.get(ScrimEndPointConfiguration.CONF_KEY_PROPAGATION_GROUP, false)) {
|
||||
GroupScimService groupScimService = new GroupScimService(session, scrimEndPointConfiguration,
|
||||
skipOrStopStrategy);
|
||||
groupScimServices.add(groupScimService);
|
||||
}
|
||||
if (scimEndpointConfigurationRaw.get(ScrimEndPointConfiguration.CONF_KEY_PROPAGATION_USER, false)) {
|
||||
UserScimService userScimService = new UserScimService(session, scrimEndPointConfiguration,
|
||||
skipOrStopStrategy);
|
||||
userScimServices.add(userScimService);
|
||||
}
|
||||
} catch (IllegalArgumentException e) {
|
||||
if (skipOrStopStrategy.allowInvalidEndpointConfiguration()) {
|
||||
LOGGER.warn("[SCIM] Invalid Endpoint configuration " + scimEndpointConfigurationRaw.getId(), e);
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public void dispatchUserModificationToAll(SCIMPropagationConsumer<UserScimService> operationToDispatch) {
|
||||
initializeClientsIfNeeded();
|
||||
Set<UserScimService> servicesCorrectlyPropagated = new LinkedHashSet<>();
|
||||
userScimServices.forEach(userScimService -> {
|
||||
try {
|
||||
operationToDispatch.acceptThrows(userScimService);
|
||||
servicesCorrectlyPropagated.add(userScimService);
|
||||
} catch (ScimPropagationException e) {
|
||||
exceptionHandler.handleException(userScimService.getConfiguration(), e);
|
||||
}
|
||||
});
|
||||
// TODO we could iterate on servicesCorrectlyPropagated to undo modification on already handled SCIM endpoints
|
||||
LOGGER.infof("[SCIM] User operation dispatched to %d SCIM server", servicesCorrectlyPropagated.size());
|
||||
}
|
||||
|
||||
public void dispatchGroupModificationToAll(SCIMPropagationConsumer<GroupScimService> operationToDispatch) {
|
||||
initializeClientsIfNeeded();
|
||||
Set<GroupScimService> servicesCorrectlyPropagated = new LinkedHashSet<>();
|
||||
groupScimServices.forEach(groupScimService -> {
|
||||
try {
|
||||
operationToDispatch.acceptThrows(groupScimService);
|
||||
servicesCorrectlyPropagated.add(groupScimService);
|
||||
} catch (ScimPropagationException e) {
|
||||
exceptionHandler.handleException(groupScimService.getConfiguration(), e);
|
||||
}
|
||||
});
|
||||
// TODO we could iterate on servicesCorrectlyPropagated to undo modification on already handled SCIM endpoints
|
||||
LOGGER.infof("[SCIM] Group operation dispatched to %d SCIM server", servicesCorrectlyPropagated.size());
|
||||
}
|
||||
|
||||
public void dispatchUserModificationToOne(ComponentModel scimServerConfiguration,
|
||||
SCIMPropagationConsumer<UserScimService> operationToDispatch) {
|
||||
initializeClientsIfNeeded();
|
||||
// Scim client should already have been created
|
||||
Optional<UserScimService> matchingClient = userScimServices.stream()
|
||||
.filter(u -> u.getConfiguration().getId().equals(scimServerConfiguration.getId())).findFirst();
|
||||
if (matchingClient.isPresent()) {
|
||||
try {
|
||||
operationToDispatch.acceptThrows(matchingClient.get());
|
||||
LOGGER.infof("[SCIM] User operation dispatched to SCIM server %s",
|
||||
matchingClient.get().getConfiguration().getName());
|
||||
} catch (ScimPropagationException e) {
|
||||
exceptionHandler.handleException(matchingClient.get().getConfiguration(), e);
|
||||
}
|
||||
} else {
|
||||
LOGGER.error("[SCIM] Could not find a Scim Client matching User endpoint configuration"
|
||||
+ scimServerConfiguration.getId());
|
||||
}
|
||||
}
|
||||
|
||||
public void dispatchGroupModificationToOne(ComponentModel scimServerConfiguration,
|
||||
SCIMPropagationConsumer<GroupScimService> operationToDispatch) {
|
||||
initializeClientsIfNeeded();
|
||||
// Scim client should already have been created
|
||||
Optional<GroupScimService> matchingClient = groupScimServices.stream()
|
||||
.filter(u -> u.getConfiguration().getId().equals(scimServerConfiguration.getId())).findFirst();
|
||||
if (matchingClient.isPresent()) {
|
||||
try {
|
||||
operationToDispatch.acceptThrows(matchingClient.get());
|
||||
LOGGER.infof("[SCIM] Group operation dispatched to SCIM server %s",
|
||||
matchingClient.get().getConfiguration().getName());
|
||||
} catch (ScimPropagationException e) {
|
||||
exceptionHandler.handleException(matchingClient.get().getConfiguration(), e);
|
||||
}
|
||||
} else {
|
||||
LOGGER.error("[SCIM] Could not find a Scim Client matching Group endpoint configuration"
|
||||
+ scimServerConfiguration.getId());
|
||||
}
|
||||
}
|
||||
|
||||
public void close() {
|
||||
for (GroupScimService c : groupScimServices) {
|
||||
c.close();
|
||||
}
|
||||
for (UserScimService c : userScimServices) {
|
||||
c.close();
|
||||
}
|
||||
groupScimServices.clear();
|
||||
userScimServices.clear();
|
||||
}
|
||||
|
||||
private void initializeClientsIfNeeded() {
|
||||
if (!clientsInitialized) {
|
||||
clientsInitialized = true;
|
||||
refreshActiveScimEndpoints();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A Consumer that throws ScimPropagationException.
|
||||
*
|
||||
* @param <T> An {@link AbstractScimService to call}
|
||||
*/
|
||||
@FunctionalInterface
|
||||
public interface SCIMPropagationConsumer<T> {
|
||||
|
||||
void acceptThrows(T elem) throws ScimPropagationException;
|
||||
|
||||
}
|
||||
}
|
|
@ -0,0 +1,118 @@
|
|||
package org.keycloak.federation.scim.core;
|
||||
|
||||
import de.captaingoldfish.scim.sdk.common.constants.HttpHeader;
|
||||
import jakarta.ws.rs.core.MediaType;
|
||||
import org.apache.commons.lang3.BooleanUtils;
|
||||
import org.jboss.logging.Logger;
|
||||
import org.keycloak.component.ComponentModel;
|
||||
import org.keycloak.models.KeycloakSession;
|
||||
import org.keycloak.models.KeycloakSessionFactory;
|
||||
import org.keycloak.models.RealmModel;
|
||||
import org.keycloak.models.utils.KeycloakModelUtils;
|
||||
import org.keycloak.provider.ProviderConfigProperty;
|
||||
import org.keycloak.provider.ProviderConfigurationBuilder;
|
||||
import org.keycloak.storage.UserStorageProvider;
|
||||
import org.keycloak.storage.UserStorageProviderFactory;
|
||||
import org.keycloak.storage.UserStorageProviderModel;
|
||||
import org.keycloak.storage.user.ImportSynchronization;
|
||||
import org.keycloak.storage.user.SynchronizationResult;
|
||||
import org.keycloak.federation.scim.event.ScimBackgroundGroupMembershipUpdater;
|
||||
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Allows to register and configure Scim endpoints through Admin console, using the provided config properties.
|
||||
*/
|
||||
public class ScimEndpointConfigurationStorageProviderFactory implements
|
||||
UserStorageProviderFactory<ScimEndpointConfigurationStorageProviderFactory.ScimEndpointConfigurationStorageProvider>,
|
||||
ImportSynchronization {
|
||||
public static final String ID = "scim";
|
||||
private static final Logger LOGGER = Logger.getLogger(ScimEndpointConfigurationStorageProviderFactory.class);
|
||||
|
||||
@Override
|
||||
public String getId() {
|
||||
return ID;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SynchronizationResult sync(KeycloakSessionFactory sessionFactory, String realmId, UserStorageProviderModel model) {
|
||||
// Manually Launch a synchronization between keycloack and the SCIM endpoint described in the given model
|
||||
LOGGER.infof("[SCIM] Sync from ScimStorageProvider - Realm %s - Model %s", realmId, model.getName());
|
||||
SynchronizationResult result = new SynchronizationResult();
|
||||
KeycloakModelUtils.runJobInTransaction(sessionFactory, session -> {
|
||||
RealmModel realm = session.realms().getRealm(realmId);
|
||||
session.getContext().setRealm(realm);
|
||||
ScimDispatcher dispatcher = new ScimDispatcher(session);
|
||||
if (BooleanUtils.TRUE.equals(model.get(ScrimEndPointConfiguration.CONF_KEY_PROPAGATION_USER))) {
|
||||
dispatcher.dispatchUserModificationToOne(model, client -> client.sync(result));
|
||||
}
|
||||
if (BooleanUtils.TRUE.equals(model.get(ScrimEndPointConfiguration.CONF_KEY_PROPAGATION_GROUP))) {
|
||||
dispatcher.dispatchGroupModificationToOne(model, client -> client.sync(result));
|
||||
}
|
||||
dispatcher.close();
|
||||
});
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SynchronizationResult syncSince(Date lastSync, KeycloakSessionFactory sessionFactory, String realmId,
|
||||
UserStorageProviderModel model) {
|
||||
return this.sync(sessionFactory, realmId, model);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void postInit(KeycloakSessionFactory factory) {
|
||||
ScimBackgroundGroupMembershipUpdater scimBackgroundGroupMembershipUpdater = new ScimBackgroundGroupMembershipUpdater(
|
||||
factory);
|
||||
scimBackgroundGroupMembershipUpdater.startBackgroundUpdates();
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<ProviderConfigProperty> getConfigProperties() {
|
||||
// These Config Properties will be use to generate configuration page in Admin Console
|
||||
return ProviderConfigurationBuilder.create().property().name(ScrimEndPointConfiguration.CONF_KEY_ENDPOINT)
|
||||
.type(ProviderConfigProperty.STRING_TYPE).required(true).label("SCIM 2.0 endpoint")
|
||||
.helpText("External SCIM 2.0 base "
|
||||
+ "URL (/ServiceProviderConfig /Schemas and /ResourcesTypes should be accessible)")
|
||||
.add().property().name(ScrimEndPointConfiguration.CONF_KEY_CONTENT_TYPE).type(ProviderConfigProperty.LIST_TYPE)
|
||||
.label("Endpoint content type").helpText("Only used when endpoint doesn't support application/scim+json")
|
||||
.options(MediaType.APPLICATION_JSON, HttpHeader.SCIM_CONTENT_TYPE).defaultValue(HttpHeader.SCIM_CONTENT_TYPE)
|
||||
.add().property().name(ScrimEndPointConfiguration.CONF_KEY_AUTH_MODE).type(ProviderConfigProperty.LIST_TYPE)
|
||||
.label("Auth mode").helpText("Select the authorization mode").options("NONE", "BASIC_AUTH", "BEARER")
|
||||
.defaultValue("NONE").add().property().name(ScrimEndPointConfiguration.CONF_KEY_AUTH_USER)
|
||||
.type(ProviderConfigProperty.STRING_TYPE).label("Auth username").helpText("Required for basic authentication.")
|
||||
.add().property().name(ScrimEndPointConfiguration.CONF_KEY_AUTH_PASSWORD).type(ProviderConfigProperty.PASSWORD)
|
||||
.label("Auth password/token").helpText("Password or token required for basic or bearer authentication.").add()
|
||||
.property().name(ScrimEndPointConfiguration.CONF_KEY_PROPAGATION_USER).type(ProviderConfigProperty.BOOLEAN_TYPE)
|
||||
.label("Enable user propagation").helpText("Should operation on users be propagated to this provider?")
|
||||
.defaultValue(BooleanUtils.TRUE).add().property().name(ScrimEndPointConfiguration.CONF_KEY_PROPAGATION_GROUP)
|
||||
.type(ProviderConfigProperty.BOOLEAN_TYPE).label("Enable group propagation")
|
||||
.helpText("Should operation on groups be propagated to this provider?").defaultValue(BooleanUtils.TRUE).add()
|
||||
.property().name(ScrimEndPointConfiguration.CONF_KEY_SYNC_IMPORT).type(ProviderConfigProperty.BOOLEAN_TYPE)
|
||||
.label("Enable import during sync").add().property()
|
||||
.name(ScrimEndPointConfiguration.CONF_KEY_SYNC_IMPORT_ACTION).type(ProviderConfigProperty.LIST_TYPE)
|
||||
.label("Import action").helpText("What to do when the user doesn't exists in Keycloak.")
|
||||
.options("NOTHING", "CREATE_LOCAL", "DELETE_REMOTE").defaultValue("CREATE_LOCAL").add().property()
|
||||
.name(ScrimEndPointConfiguration.CONF_KEY_SYNC_REFRESH).type(ProviderConfigProperty.BOOLEAN_TYPE)
|
||||
.label("Enable refresh during sync").name(ScrimEndPointConfiguration.CONF_KEY_LOG_ALL_SCIM_REQUESTS)
|
||||
.type(ProviderConfigProperty.BOOLEAN_TYPE).label("Log SCIM requests and responses")
|
||||
.helpText("If true, all sent SCIM requests and responses will be logged").add().build();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ScimEndpointConfigurationStorageProvider create(KeycloakSession session, ComponentModel model) {
|
||||
return new ScimEndpointConfigurationStorageProvider();
|
||||
}
|
||||
|
||||
/**
|
||||
* Empty implementation : we used this {@link ScimEndpointConfigurationStorageProviderFactory} to generate Admin Console
|
||||
* page.
|
||||
*/
|
||||
public static final class ScimEndpointConfigurationStorageProvider implements UserStorageProvider {
|
||||
@Override
|
||||
public void close() {
|
||||
// Nothing to close here
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,100 @@
|
|||
package org.keycloak.federation.scim.core;
|
||||
|
||||
import de.captaingoldfish.scim.sdk.client.http.BasicAuth;
|
||||
import org.keycloak.component.ComponentModel;
|
||||
|
||||
public class ScrimEndPointConfiguration {
|
||||
// Configuration keys : also used in Admin Console page
|
||||
public static final String CONF_KEY_AUTH_MODE = "auth-mode";
|
||||
public static final String CONF_KEY_AUTH_PASSWORD = "auth-pass";
|
||||
public static final String CONF_KEY_AUTH_USER = "auth-user";
|
||||
public static final String CONF_KEY_CONTENT_TYPE = "content-type";
|
||||
public static final String CONF_KEY_ENDPOINT = "endpoint";
|
||||
public static final String CONF_KEY_SYNC_IMPORT_ACTION = "sync-import-action";
|
||||
public static final String CONF_KEY_SYNC_IMPORT = "sync-import";
|
||||
public static final String CONF_KEY_SYNC_REFRESH = "sync-refresh";
|
||||
public static final String CONF_KEY_PROPAGATION_USER = "propagation-user";
|
||||
public static final String CONF_KEY_PROPAGATION_GROUP = "propagation-group";
|
||||
public static final String CONF_KEY_LOG_ALL_SCIM_REQUESTS = "log-all-scim-requests";
|
||||
|
||||
private final String endPoint;
|
||||
private final String id;
|
||||
private final String name;
|
||||
private final String contentType;
|
||||
private final String authorizationHeaderValue;
|
||||
private final ImportAction importAction;
|
||||
private final boolean pullFromScimSynchronisationActivated;
|
||||
private final boolean pushToScimSynchronisationActivated;
|
||||
private final boolean logAllScimRequests;
|
||||
|
||||
public ScrimEndPointConfiguration(ComponentModel scimProviderConfiguration) {
|
||||
try {
|
||||
AuthMode authMode = AuthMode.valueOf(scimProviderConfiguration.get(CONF_KEY_AUTH_MODE));
|
||||
|
||||
authorizationHeaderValue = switch (authMode) {
|
||||
case BEARER -> "Bearer " + scimProviderConfiguration.get(CONF_KEY_AUTH_PASSWORD);
|
||||
case BASIC_AUTH -> {
|
||||
BasicAuth basicAuth = BasicAuth.builder().username(scimProviderConfiguration.get(CONF_KEY_AUTH_USER))
|
||||
.password(scimProviderConfiguration.get(CONF_KEY_AUTH_PASSWORD)).build();
|
||||
yield basicAuth.getAuthorizationHeaderValue();
|
||||
}
|
||||
case NONE -> "";
|
||||
};
|
||||
contentType = scimProviderConfiguration.get(CONF_KEY_CONTENT_TYPE, "");
|
||||
endPoint = scimProviderConfiguration.get(CONF_KEY_ENDPOINT, "");
|
||||
id = scimProviderConfiguration.getId();
|
||||
name = scimProviderConfiguration.getName();
|
||||
importAction = ImportAction.valueOf(scimProviderConfiguration.get(CONF_KEY_SYNC_IMPORT_ACTION));
|
||||
pullFromScimSynchronisationActivated = scimProviderConfiguration.get(CONF_KEY_SYNC_IMPORT, false);
|
||||
pushToScimSynchronisationActivated = scimProviderConfiguration.get(CONF_KEY_SYNC_REFRESH, false);
|
||||
logAllScimRequests = scimProviderConfiguration.get(CONF_KEY_LOG_ALL_SCIM_REQUESTS, false);
|
||||
} catch (IllegalArgumentException e) {
|
||||
throw new IllegalArgumentException(
|
||||
"authMode '" + scimProviderConfiguration.get(CONF_KEY_AUTH_MODE) + "' is not supported");
|
||||
}
|
||||
}
|
||||
|
||||
public boolean isPushToScimSynchronisationActivated() {
|
||||
return pushToScimSynchronisationActivated;
|
||||
}
|
||||
|
||||
public boolean isPullFromScimSynchronisationActivated() {
|
||||
return pullFromScimSynchronisationActivated;
|
||||
}
|
||||
|
||||
public String getContentType() {
|
||||
return contentType;
|
||||
}
|
||||
|
||||
public String getAuthorizationHeaderValue() {
|
||||
return authorizationHeaderValue;
|
||||
}
|
||||
|
||||
public String getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public ImportAction getImportAction() {
|
||||
return importAction;
|
||||
}
|
||||
|
||||
public String getEndPoint() {
|
||||
return endPoint;
|
||||
}
|
||||
|
||||
public boolean isLogAllScimRequests() {
|
||||
return logAllScimRequests;
|
||||
}
|
||||
|
||||
public enum AuthMode {
|
||||
BEARER, BASIC_AUTH, NONE
|
||||
}
|
||||
|
||||
public enum ImportAction {
|
||||
CREATE_LOCAL, DELETE_REMOTE, NOTHING
|
||||
}
|
||||
}
|
|
@ -0,0 +1,7 @@
|
|||
package org.keycloak.federation.scim.core.exceptions;
|
||||
|
||||
public class InconsistentScimMappingException extends ScimPropagationException {
|
||||
public InconsistentScimMappingException(String message) {
|
||||
super(message);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,28 @@
|
|||
package org.keycloak.federation.scim.core.exceptions;
|
||||
|
||||
import de.captaingoldfish.scim.sdk.client.response.ServerResponse;
|
||||
|
||||
import java.util.Optional;
|
||||
|
||||
public class InvalidResponseFromScimEndpointException extends ScimPropagationException {
|
||||
|
||||
private final transient Optional<ServerResponse> response;
|
||||
|
||||
public InvalidResponseFromScimEndpointException(ServerResponse response, String message) {
|
||||
super(message);
|
||||
this.response = Optional.of(response);
|
||||
}
|
||||
|
||||
public InvalidResponseFromScimEndpointException(String message, Exception e) {
|
||||
super(message, e);
|
||||
this.response = Optional.empty();
|
||||
}
|
||||
|
||||
/**
|
||||
* Empty response can occur if a major exception was thrown while retrying the request.
|
||||
*/
|
||||
public Optional<ServerResponse> getResponse() {
|
||||
return response;
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,53 @@
|
|||
package org.keycloak.federation.scim.core.exceptions;
|
||||
|
||||
import com.google.common.collect.Lists;
|
||||
import org.keycloak.federation.scim.core.ScrimEndPointConfiguration;
|
||||
|
||||
import java.util.ArrayList;
|
||||
|
||||
public enum RollbackApproach implements RollbackStrategy {
|
||||
ALWAYS_ROLLBACK {
|
||||
@Override
|
||||
public boolean shouldRollback(ScrimEndPointConfiguration configuration, ScimPropagationException e) {
|
||||
return true;
|
||||
}
|
||||
},
|
||||
NEVER_ROLLBACK {
|
||||
@Override
|
||||
public boolean shouldRollback(ScrimEndPointConfiguration configuration, ScimPropagationException e) {
|
||||
return false;
|
||||
}
|
||||
},
|
||||
CRITICAL_ONLY_ROLLBACK {
|
||||
@Override
|
||||
public boolean shouldRollback(ScrimEndPointConfiguration configuration, ScimPropagationException e) {
|
||||
if (e instanceof InconsistentScimMappingException) {
|
||||
// Occurs when mapping between a SCIM resource and a keycloak user failed (missing, ambiguous..)
|
||||
// Log can be sufficient here, no rollback required
|
||||
return false;
|
||||
}
|
||||
if (e instanceof UnexpectedScimDataException) {
|
||||
// Occurs when a SCIM endpoint sends invalid date (e.g. group with empty name, user without ids...)
|
||||
// No rollback required : we cannot recover. This needs to be fixed in the SCIM endpoint data
|
||||
return false;
|
||||
}
|
||||
if (e instanceof InvalidResponseFromScimEndpointException invalidResponseFromScimEndpointException) {
|
||||
return shouldRollbackBecauseOfResponse(invalidResponseFromScimEndpointException);
|
||||
}
|
||||
// Should not occur
|
||||
throw new IllegalStateException("Unkown ScimPropagationException", e);
|
||||
}
|
||||
|
||||
private boolean shouldRollbackBecauseOfResponse(InvalidResponseFromScimEndpointException e) {
|
||||
// If we have a response
|
||||
return e.getResponse().map(r -> {
|
||||
// We consider that 404 are acceptable, otherwise rollback
|
||||
ArrayList<Integer> acceptableStatus = Lists.newArrayList(200, 204, 404);
|
||||
return !acceptableStatus.contains(r.getHttpStatus());
|
||||
}).orElse(
|
||||
// Never got an answer, server was either misconfigured or unreachable
|
||||
// No rollback in that case.
|
||||
false);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,20 @@
|
|||
package org.keycloak.federation.scim.core.exceptions;
|
||||
|
||||
import org.keycloak.federation.scim.core.ScrimEndPointConfiguration;
|
||||
|
||||
/**
 * In charge of deciding, when facing a SCIM-related issue during an operation (e.g. User creation), whether we should :
 * - Log the issue and let the operation succeed in the Keycloak database (potentially desynchronising Keycloak with the
 *   SCIM servers), or
 * - Rollback the whole operation
 */
public interface RollbackStrategy {

    /**
     * Indicates whether we should rollback the whole transaction because of the given exception.
     *
     * @param configuration The SCIM Endpoint configuration for which the exception occurred
     * @param e the exception that we have to handle
     * @return true if transaction should be rolled back, false if we should log and continue operation
     */
    boolean shouldRollback(ScrimEndPointConfiguration configuration, ScimPropagationException e);
}
|
|
@ -0,0 +1,41 @@
|
|||
package org.keycloak.federation.scim.core.exceptions;
|
||||
|
||||
import org.jboss.logging.Logger;
|
||||
import org.keycloak.models.KeycloakSession;
|
||||
import org.keycloak.federation.scim.core.ScrimEndPointConfiguration;
|
||||
|
||||
/**
 * In charge of dealing with SCIM exceptions by ignoring, logging or rolling back the transaction according to :
 * - The context in which it occurs (sync, user creation...)
 * - The related SCIM endpoint and its configuration
 * - The thrown exception itself
 */
public class ScimExceptionHandler {
    private static final Logger LOGGER = Logger.getLogger(ScimExceptionHandler.class);

    // Session used to reach the transaction manager when a rollback is required.
    private final KeycloakSession session;
    // Strategy deciding whether a given exception warrants a rollback.
    private final RollbackStrategy rollbackStrategy;

    /**
     * Creates a handler with the default strategy: rollback only for critical errors.
     *
     * @param session the current Keycloak session
     */
    public ScimExceptionHandler(KeycloakSession session) {
        this(session, RollbackApproach.CRITICAL_ONLY_ROLLBACK);
    }

    /**
     * @param session the current Keycloak session
     * @param rollbackStrategy the strategy deciding whether an exception triggers a rollback
     */
    public ScimExceptionHandler(KeycloakSession session, RollbackStrategy rollbackStrategy) {
        this.session = session;
        this.rollbackStrategy = rollbackStrategy;
    }

    /**
     * Handles the given exception by logging and/or rolling back the transaction.
     *
     * @param scimProviderConfiguration the configuration of the endpoint for which the propagation exception occurred
     * @param e the occurring exception
     */
    public void handleException(ScrimEndPointConfiguration scimProviderConfiguration, ScimPropagationException e) {
        String errorMessage = "[SCIM] Error while propagating to SCIM endpoint " + scimProviderConfiguration.getName();
        if (rollbackStrategy.shouldRollback(scimProviderConfiguration, e)) {
            // Strategy considers this failure critical: abort the whole Keycloak transaction.
            session.getTransactionManager().rollback();
            LOGGER.error("TRANSACTION ROLLBACK - " + errorMessage, e);
        } else {
            // Non-critical: log and let the operation complete in the Keycloak database.
            LOGGER.warn(errorMessage, e);
        }
    }
}
|
|
@ -0,0 +1,12 @@
|
|||
package org.keycloak.federation.scim.core.exceptions;
|
||||
|
||||
/**
 * Base type for every checked exception raised while propagating a Keycloak change to a SCIM
 * endpoint. Concrete subclasses identify the precise failure (invalid endpoint response,
 * inconsistent mapping, unexpected data...).
 */
public abstract class ScimPropagationException extends Exception {

    /**
     * @param message human-readable description of the propagation failure
     */
    protected ScimPropagationException(String message) {
        super(message);
    }

    /**
     * @param message human-readable description of the propagation failure
     * @param cause the underlying exception that triggered this failure
     */
    protected ScimPropagationException(String message, Exception cause) {
        super(message, cause);
    }
}
|
|
@ -0,0 +1,58 @@
|
|||
package org.keycloak.federation.scim.core.exceptions;
|
||||
|
||||
import org.keycloak.federation.scim.core.ScrimEndPointConfiguration;
|
||||
|
||||
public enum SkipOrStopApproach implements SkipOrStopStrategy {
|
||||
ALWAYS_SKIP_AND_CONTINUE {
|
||||
@Override
|
||||
public boolean allowPartialSynchronizationWhenPushingToScim(ScrimEndPointConfiguration configuration) {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean allowPartialSynchronizationWhenPullingFromScim(ScrimEndPointConfiguration configuration) {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean allowMissingMembersWhenPushingGroupToScim(ScrimEndPointConfiguration configuration) {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean allowInvalidEndpointConfiguration() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean skipInvalidDataFromScimEndpoint(ScrimEndPointConfiguration configuration) {
|
||||
return false;
|
||||
}
|
||||
},
|
||||
ALWAYS_STOP {
|
||||
@Override
|
||||
public boolean allowPartialSynchronizationWhenPushingToScim(ScrimEndPointConfiguration configuration) {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean allowPartialSynchronizationWhenPullingFromScim(ScrimEndPointConfiguration configuration) {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean allowMissingMembersWhenPushingGroupToScim(ScrimEndPointConfiguration configuration) {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean allowInvalidEndpointConfiguration() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean skipInvalidDataFromScimEndpoint(ScrimEndPointConfiguration configuration) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,58 @@
|
|||
package org.keycloak.federation.scim.core.exceptions;
|
||||
|
||||
import org.keycloak.federation.scim.core.ScrimEndPointConfiguration;
|
||||
|
||||
/**
 * In charge of deciding, when facing a SCIM-related issue, whether we should :
 * - log a warning, skip the problematic element and continue the rest of the operation, or
 * - stop immediately the whole operation (typically, a synchronisation between SCIM and Keycloak)
 */
public interface SkipOrStopStrategy {
    /**
     * Indicates if, during a synchronisation from Keycloak to a SCIM endpoint, we should :
     * - cancel the whole synchronisation if an element CRUD fails, or
     * - keep on with synchronisation, allowing a partial synchronisation
     *
     * @param configuration the configuration of the endpoint in which the error occurred
     * @return true if a partial synchronisation is allowed, false if we should stop the whole synchronisation at first issue
     */
    boolean allowPartialSynchronizationWhenPushingToScim(ScrimEndPointConfiguration configuration);

    /**
     * Indicates if, during a synchronisation from a SCIM endpoint to Keycloak, we should :
     * - cancel the whole synchronisation if an element CRUD fails, or
     * - keep on with synchronisation, allowing a partial synchronisation
     *
     * @param configuration the configuration of the endpoint in which the error occurred
     * @return true if a partial synchronisation is allowed, false if we should interrupt the whole synchronisation at first
     *         issue
     */
    boolean allowPartialSynchronizationWhenPullingFromScim(ScrimEndPointConfiguration configuration);

    /**
     * Indicates if, when we propagate a group creation or update to a SCIM endpoint and some of its members are not mapped to
     * SCIM, we should allow a partial group update or interrupt completely.
     *
     * @param configuration the configuration of the endpoint in which the error occurred
     * @return true if a partial group update is allowed, false if we should interrupt the group update in case of any unmapped
     *         member
     */
    boolean allowMissingMembersWhenPushingGroupToScim(ScrimEndPointConfiguration configuration);

    /**
     * Indicates if, when facing an invalid SCIM endpoint configuration (resulting in an unreachable SCIM server), we should
     * stop or ignore this configuration.
     *
     * @return true if the invalid endpoint should be ignored, false if we should interrupt the rest of the synchronisation
     */
    boolean allowInvalidEndpointConfiguration();

    /**
     * Indicates if, when trying to pull Users or Groups from a SCIM endpoint and we encounter invalid data (e.g. a group
     * with an empty name), we should :
     * - skip the invalid element pull and continue, or
     * - cancel the whole synchronisation
     *
     * @param configuration the configuration of the endpoint in which the error occurred
     * @return true if we should skip the invalid data synchronisation and pursue, false if we should interrupt immediately the
     *         whole synchronisation
     */
    boolean skipInvalidDataFromScimEndpoint(ScrimEndPointConfiguration configuration);

}
|
|
@ -0,0 +1,7 @@
|
|||
package org.keycloak.federation.scim.core.exceptions;
|
||||
|
||||
public class UnexpectedScimDataException extends ScimPropagationException {
|
||||
public UnexpectedScimDataException(String message) {
|
||||
super(message);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,292 @@
|
|||
package org.keycloak.federation.scim.core.service;
|
||||
|
||||
import de.captaingoldfish.scim.sdk.common.resources.ResourceNode;
|
||||
import de.captaingoldfish.scim.sdk.common.resources.complex.Meta;
|
||||
import org.jboss.logging.Logger;
|
||||
import org.keycloak.models.KeycloakSession;
|
||||
import org.keycloak.models.RoleMapperModel;
|
||||
import org.keycloak.storage.user.SynchronizationResult;
|
||||
import org.keycloak.federation.scim.core.ScrimEndPointConfiguration;
|
||||
import org.keycloak.federation.scim.core.exceptions.InconsistentScimMappingException;
|
||||
import org.keycloak.federation.scim.core.exceptions.InvalidResponseFromScimEndpointException;
|
||||
import org.keycloak.federation.scim.core.exceptions.SkipOrStopStrategy;
|
||||
import org.keycloak.federation.scim.core.exceptions.UnexpectedScimDataException;
|
||||
import org.keycloak.federation.scim.jpa.ScimResourceDao;
|
||||
import org.keycloak.federation.scim.jpa.ScimResourceMapping;
|
||||
|
||||
import java.net.URI;
|
||||
import java.net.URISyntaxException;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
/**
 * A service in charge of synchronisation (CRUD) between a Keycloak Role (UserModel, GroupModel) and a SCIM Resource
 * (User, Group).
 *
 * @param <K> The Keycloak Model (e.g. UserModel, GroupModel)
 * @param <S> The SCIM Resource (e.g. User, Group)
 */
public abstract class AbstractScimService<K extends RoleMapperModel, S extends ResourceNode> implements AutoCloseable {

    private static final Logger LOGGER = Logger.getLogger(AbstractScimService.class);
    // Decides whether an individual element failure aborts the whole synchronisation or is skipped.
    protected final SkipOrStopStrategy skipOrStopStrategy;
    private final KeycloakSession keycloakSession;
    private final ScrimEndPointConfiguration scimProviderConfiguration;
    // Which resource kind (USER or GROUP) this service synchronises.
    private final ScimResourceType type;
    // HTTP client for the remote SCIM endpoint; opened in the constructor, released in close().
    private final ScimClient<S> scimClient;

    protected AbstractScimService(KeycloakSession keycloakSession, ScrimEndPointConfiguration scimProviderConfiguration,
            ScimResourceType type, SkipOrStopStrategy skipOrStopStrategy) {
        this.keycloakSession = keycloakSession;
        this.scimProviderConfiguration = scimProviderConfiguration;
        this.type = type;
        this.scimClient = ScimClient.open(scimProviderConfiguration, type);
        this.skipOrStopStrategy = skipOrStopStrategy;
    }

    /**
     * Propagates the creation of the given Keycloak entity to the SCIM endpoint and records the id mapping.
     *
     * @param roleMapperModel the Keycloak entity to create remotely
     * @throws InconsistentScimMappingException if a mapping for this entity already exists
     * @throws InvalidResponseFromScimEndpointException if the endpoint rejects the creation
     */
    public void create(K roleMapperModel) throws InconsistentScimMappingException, InvalidResponseFromScimEndpointException {
        if (isMarkedToIgnore(roleMapperModel)) {
            // Silently return: resource is explicitly marked as to ignore
            return;
        }
        // If mapping, then we are trying to recreate a user that was already created by import
        KeycloakId id = getId(roleMapperModel);
        if (findMappingById(id).isPresent()) {
            throw new InconsistentScimMappingException(
                    "Trying to create user with id " + id + ": id already exists in Keycloak database");
        }
        S scimForCreation = scimRequestBodyForCreate(roleMapperModel);
        EntityOnRemoteScimId externalId = scimClient.create(id, scimForCreation);
        createMapping(id, externalId);
    }

    /**
     * Propagates an update of the given Keycloak entity to the SCIM endpoint (replace semantics).
     *
     * @param roleMapperModel the Keycloak entity to update remotely
     * @throws InconsistentScimMappingException if no SCIM mapping exists for this entity
     * @throws InvalidResponseFromScimEndpointException if the endpoint rejects the update
     */
    public void update(K roleMapperModel) throws InconsistentScimMappingException, InvalidResponseFromScimEndpointException {
        if (isMarkedToIgnore(roleMapperModel)) {
            // Silently return: resource is explicitly marked as to ignore
            return;
        }
        KeycloakId keycloakId = getId(roleMapperModel);
        EntityOnRemoteScimId entityOnRemoteScimId = findMappingById(keycloakId)
                .map(ScimResourceMapping::getExternalIdAsEntityOnRemoteScimId)
                .orElseThrow(() -> new InconsistentScimMappingException("Failed to find SCIM mapping for " + keycloakId));
        S scimForReplace = scimRequestBodyForUpdate(roleMapperModel, entityOnRemoteScimId);
        scimClient.update(entityOnRemoteScimId, scimForReplace);
    }

    /**
     * Builds the SCIM request body for an update of the given entity.
     */
    protected abstract S scimRequestBodyForUpdate(K roleMapperModel, EntityOnRemoteScimId externalId)
            throws InconsistentScimMappingException;

    /**
     * Propagates the deletion of the entity to the SCIM endpoint, then removes the local id mapping.
     *
     * @param id the Keycloak id of the entity to delete
     * @throws InconsistentScimMappingException if no SCIM mapping exists for this id
     * @throws InvalidResponseFromScimEndpointException if the endpoint rejects the deletion
     */
    public void delete(KeycloakId id) throws InconsistentScimMappingException, InvalidResponseFromScimEndpointException {
        ScimResourceMapping resource = findMappingById(id).orElseThrow(() -> new InconsistentScimMappingException(
                "Failed to delete resource %s, scim mapping not found: ".formatted(id)));
        EntityOnRemoteScimId externalId = resource.getExternalIdAsEntityOnRemoteScimId();
        scimClient.delete(externalId);
        getScimResourceDao().delete(resource);
    }

    /**
     * Pushes every local (Keycloak) resource of this service's type to the SCIM endpoint.
     * Per-element failures are skipped or propagated according to the skipOrStopStrategy.
     */
    public void pushAllResourcesToScim(SynchronizationResult syncRes)
            throws InvalidResponseFromScimEndpointException, InconsistentScimMappingException {
        LOGGER.info("[SCIM] Push resources to endpoint " + this.getConfiguration().getEndPoint());
        try (Stream<K> resourcesStream = getResourceStream()) {
            // Materialise the stream before iterating: each push may itself hit the database.
            Set<K> resources = resourcesStream.collect(Collectors.toUnmodifiableSet());
            for (K resource : resources) {
                KeycloakId id = getId(resource);
                pushSingleResourceToScim(syncRes, resource, id);
            }
        }
    }

    /**
     * Pulls every remote SCIM resource of this service's type into Keycloak.
     * Per-element failures are skipped or propagated according to the skipOrStopStrategy.
     */
    public void pullAllResourcesFromScim(SynchronizationResult syncRes)
            throws UnexpectedScimDataException, InconsistentScimMappingException, InvalidResponseFromScimEndpointException {
        LOGGER.info("[SCIM] Pull resources from endpoint " + this.getConfiguration().getEndPoint());
        for (S resource : scimClient.listResources()) {
            pullSingleResourceFromScim(syncRes, resource);
        }
    }

    // Pushes one local resource (create or update), honouring the skip-or-stop strategy on failure.
    private void pushSingleResourceToScim(SynchronizationResult syncRes, K resource, KeycloakId id)
            throws InvalidResponseFromScimEndpointException, InconsistentScimMappingException {
        try {
            LOGGER.infof("[SCIM] Reconciling local resource %s", id);
            if (shouldIgnoreForScimSynchronization(resource)) {
                LOGGER.infof("[SCIM] Skip local resource %s", id);
                return;
            }
            if (findMappingById(id).isPresent()) {
                LOGGER.info("[SCIM] Replacing it");
                update(resource);
            } else {
                LOGGER.info("[SCIM] Creating it");
                create(resource);
            }
            syncRes.increaseUpdated();
        } catch (InvalidResponseFromScimEndpointException e) {
            if (skipOrStopStrategy.allowPartialSynchronizationWhenPushingToScim(this.getConfiguration())) {
                LOGGER.warn("Error while syncing " + id + " to endpoint " + getConfiguration().getEndPoint(), e);
            } else {
                throw e;
            }
        } catch (InconsistentScimMappingException e) {
            if (skipOrStopStrategy.allowPartialSynchronizationWhenPushingToScim(this.getConfiguration())) {
                LOGGER.warn("Inconsistent data for element " + id + " and endpoint " + getConfiguration().getEndPoint(), e);
            } else {
                throw e;
            }
        }
    }

    // Reconciles one remote SCIM resource into Keycloak, honouring the skip-or-stop strategy on failure.
    private void pullSingleResourceFromScim(SynchronizationResult syncRes, S resource)
            throws UnexpectedScimDataException, InconsistentScimMappingException, InvalidResponseFromScimEndpointException {
        try {
            LOGGER.infof("[SCIM] Reconciling remote resource %s", resource);
            EntityOnRemoteScimId externalId = resource.getId().map(EntityOnRemoteScimId::new)
                    .orElseThrow(() -> new UnexpectedScimDataException(
                            "Remote SCIM resource doesn't have an id, cannot import it in Keycloak"));
            if (validMappingAlreadyExists(externalId))
                return;

            // Here no keycloak user/group matching the SCIM external id exists
            // Try to match existing keycloak resource by properties (username, email, name)
            Optional<KeycloakId> mapped = matchKeycloakMappingByScimProperties(resource);
            if (mapped.isPresent()) {
                // If found a mapped, update
                LOGGER.info(
                        "[SCIM] Matched SCIM resource " + externalId + " from properties with keycloak entity " + mapped.get());
                createMapping(mapped.get(), externalId);
                syncRes.increaseUpdated();
            } else {
                // If not, create it locally or deleting it remotely (according to the configured Import Action)
                createLocalOrDeleteRemote(syncRes, resource, externalId);
            }
        } catch (UnexpectedScimDataException e) {
            if (skipOrStopStrategy.skipInvalidDataFromScimEndpoint(getConfiguration())) {
                LOGGER.warn("[SCIM] Skipping element synchronisation because of invalid Scim Data for element "
                        + resource.getId() + " : " + e.getMessage(), e);
            } else {
                throw e;
            }
        } catch (InconsistentScimMappingException e) {
            if (skipOrStopStrategy.allowPartialSynchronizationWhenPullingFromScim(getConfiguration())) {
                LOGGER.warn("[SCIM] Skipping element synchronisation because of inconsistent mapping for element "
                        + resource.getId() + " : " + e.getMessage(), e);
            } else {
                throw e;
            }
        } catch (InvalidResponseFromScimEndpointException e) {
            // Can only occur in case of a DELETE_REMOTE conflict action
            if (skipOrStopStrategy.allowPartialSynchronizationWhenPullingFromScim(getConfiguration())) {
                LOGGER.warn("[SCIM] Could not delete SCIM resource " + resource.getId() + " during synchronisation", e);
            } else {
                throw e;
            }
        }

    }

    // Returns true when a mapping for this external id already points at an existing Keycloak entity.
    // Side effect: if the mapping exists but the Keycloak entity is gone, the dangling mapping is deleted.
    private boolean validMappingAlreadyExists(EntityOnRemoteScimId externalId) {
        Optional<ScimResourceMapping> optionalMapping = getScimResourceDao().findByExternalId(externalId, type);
        // If an existing mapping exists, delete potential dangling references
        if (optionalMapping.isPresent()) {
            ScimResourceMapping mapping = optionalMapping.get();
            if (entityExists(mapping.getIdAsKeycloakId())) {
                LOGGER.info("[SCIM] Valid mapping found, skipping");
                return true;
            } else {
                LOGGER.info("[SCIM] Delete a dangling mapping");
                getScimResourceDao().delete(mapping);
            }
        }
        return false;
    }

    // Resolves an unmatched remote resource according to the endpoint's configured Import Action:
    // create it locally, delete it remotely, or do nothing.
    private void createLocalOrDeleteRemote(SynchronizationResult syncRes, S resource, EntityOnRemoteScimId externalId)
            throws UnexpectedScimDataException, InconsistentScimMappingException, InvalidResponseFromScimEndpointException {
        switch (scimProviderConfiguration.getImportAction()) {
            case CREATE_LOCAL -> {
                LOGGER.info("[SCIM] Create local resource for SCIM resource " + externalId);
                KeycloakId id = createEntity(resource);
                createMapping(id, externalId);
                syncRes.increaseAdded();
            }
            case DELETE_REMOTE -> {
                LOGGER.info("[SCIM] Delete remote resource " + externalId);
                scimClient.delete(externalId);
            }
            case NOTHING -> LOGGER.info("[SCIM] Import action set to NOTHING");
        }
    }

    /** Builds the SCIM request body for a creation of the given entity. */
    protected abstract S scimRequestBodyForCreate(K roleMapperModel) throws InconsistentScimMappingException;

    /** Returns the Keycloak id of the given entity. */
    protected abstract KeycloakId getId(K roleMapperModel);

    /** Returns true if the entity is explicitly marked to be excluded from SCIM propagation. */
    protected abstract boolean isMarkedToIgnore(K roleMapperModel);

    // Records the Keycloak id <-> SCIM external id association for this resource type.
    private void createMapping(KeycloakId keycloakId, EntityOnRemoteScimId externalId) {
        getScimResourceDao().create(keycloakId, externalId, type);
    }

    // NOTE(review): creates a fresh DAO on every call — presumably cheap; confirm before caching.
    protected ScimResourceDao getScimResourceDao() {
        return ScimResourceDao.newInstance(getKeycloakSession(), scimProviderConfiguration.getId());
    }

    private Optional<ScimResourceMapping> findMappingById(KeycloakId keycloakId) {
        return getScimResourceDao().findById(keycloakId, type);
    }

    private KeycloakSession getKeycloakSession() {
        return keycloakSession;
    }

    /** Returns true if this resource should be skipped during synchronisation runs. */
    protected abstract boolean shouldIgnoreForScimSynchronization(K resource);

    /** Returns a stream over all local resources of this service's type; callers must close it. */
    protected abstract Stream<K> getResourceStream();

    /** Creates the local Keycloak entity for the given remote SCIM resource, returning its id. */
    protected abstract KeycloakId createEntity(S resource) throws UnexpectedScimDataException, InconsistentScimMappingException;

    /** Tries to match the remote resource against an existing Keycloak entity by its properties. */
    protected abstract Optional<KeycloakId> matchKeycloakMappingByScimProperties(S resource)
            throws InconsistentScimMappingException;

    /** Returns true if a Keycloak entity with the given id exists. */
    protected abstract boolean entityExists(KeycloakId keycloakId);

    /**
     * Runs the full synchronisation: pull from SCIM first (if activated), then push to SCIM (if activated).
     */
    public void sync(SynchronizationResult syncRes)
            throws InconsistentScimMappingException, InvalidResponseFromScimEndpointException, UnexpectedScimDataException {
        if (this.scimProviderConfiguration.isPullFromScimSynchronisationActivated()) {
            this.pullAllResourcesFromScim(syncRes);
        }
        if (this.scimProviderConfiguration.isPushToScimSynchronisationActivated()) {
            this.pushAllResourcesToScim(syncRes);
        }
    }

    /** Builds a SCIM Meta block whose location points at the given remote resource. */
    protected Meta newMetaLocation(EntityOnRemoteScimId externalId) {
        Meta meta = new Meta();
        URI uri = getUri(type, externalId);
        meta.setLocation(uri.toString());
        return meta;
    }

    /** Builds the relative URI {@code <endpoint>/<id>} for a remote resource of the given type. */
    protected URI getUri(ScimResourceType type, EntityOnRemoteScimId externalId) {
        try {
            return new URI("%s/%s".formatted(type.getEndpoint(), externalId.asString()));
        } catch (URISyntaxException e) {
            throw new IllegalStateException(
                    "should never occur: can not format URI for type %s and id %s".formatted(type, externalId), e);
        }
    }

    protected KeycloakDao getKeycloakDao() {
        return new KeycloakDao(getKeycloakSession());
    }

    /** Releases the underlying SCIM HTTP client. */
    @Override
    public void close() {
        scimClient.close();
    }

    public ScrimEndPointConfiguration getConfiguration() {
        return scimProviderConfiguration;
    }
}
|
|
@ -0,0 +1,4 @@
|
|||
package org.keycloak.federation.scim.core.service;
|
||||
|
||||
/**
 * The identifier of a resource on a remote SCIM endpoint (the SCIM "id" attribute),
 * as opposed to its {@code KeycloakId} on the local Keycloak side.
 */
public record EntityOnRemoteScimId(String asString) {
}
|
|
@ -0,0 +1,130 @@
|
|||
package org.keycloak.federation.scim.core.service;
|
||||
|
||||
import de.captaingoldfish.scim.sdk.common.resources.Group;
|
||||
import de.captaingoldfish.scim.sdk.common.resources.complex.Meta;
|
||||
import de.captaingoldfish.scim.sdk.common.resources.multicomplex.Member;
|
||||
import org.apache.commons.collections4.CollectionUtils;
|
||||
import org.apache.commons.lang3.BooleanUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.jboss.logging.Logger;
|
||||
import org.keycloak.models.GroupModel;
|
||||
import org.keycloak.models.KeycloakSession;
|
||||
import org.keycloak.models.UserModel;
|
||||
import org.keycloak.federation.scim.core.ScrimEndPointConfiguration;
|
||||
import org.keycloak.federation.scim.core.exceptions.InconsistentScimMappingException;
|
||||
import org.keycloak.federation.scim.core.exceptions.SkipOrStopStrategy;
|
||||
import org.keycloak.federation.scim.core.exceptions.UnexpectedScimDataException;
|
||||
import org.keycloak.federation.scim.jpa.ScimResourceMapping;
|
||||
|
||||
import java.net.URI;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.TreeSet;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
/**
 * Synchronisation service between Keycloak groups ({@code GroupModel}) and SCIM Group resources.
 */
public class GroupScimService extends AbstractScimService<GroupModel, Group> {
    private static final Logger LOGGER = Logger.getLogger(GroupScimService.class);

    public GroupScimService(KeycloakSession keycloakSession, ScrimEndPointConfiguration scimProviderConfiguration,
            SkipOrStopStrategy skipOrStopStrategy) {
        super(keycloakSession, scimProviderConfiguration, ScimResourceType.GROUP, skipOrStopStrategy);
    }

    @Override
    protected Stream<GroupModel> getResourceStream() {
        return getKeycloakDao().getGroupsStream();
    }

    @Override
    protected boolean entityExists(KeycloakId keycloakId) {
        return getKeycloakDao().groupExists(keycloakId);
    }

    /**
     * Matches a remote SCIM group against a local group whose name equals either the SCIM id or the
     * SCIM displayName.
     * NOTE(review): matching the group name against the SCIM id looks unusual — confirm intended.
     */
    @Override
    protected Optional<KeycloakId> matchKeycloakMappingByScimProperties(Group resource) {
        Set<String> names = new TreeSet<>();
        resource.getId().ifPresent(names::add);
        resource.getDisplayName().ifPresent(names::add);
        try (Stream<GroupModel> groupsStream = getKeycloakDao().getGroupsStream()) {
            Optional<GroupModel> group = groupsStream.filter(groupModel -> names.contains(groupModel.getName())).findFirst();
            return group.map(GroupModel::getId).map(KeycloakId::new);
        }
    }

    /**
     * Creates the local group for a remote SCIM Group and joins every member that already has a SCIM
     * user mapping.
     *
     * @throws UnexpectedScimDataException if the remote group has no name, or a member has no id
     * @throws InconsistentScimMappingException if a member has no local SCIM user mapping
     */
    @Override
    protected KeycloakId createEntity(Group resource) throws UnexpectedScimDataException, InconsistentScimMappingException {
        String displayName = resource.getDisplayName().filter(StringUtils::isNotBlank)
                .orElseThrow(() -> new UnexpectedScimDataException(
                        "Remote Scim group has empty name, can't create. Resource id = %s".formatted(resource.getId())));
        GroupModel group = getKeycloakDao().createGroup(displayName);
        List<Member> groupMembers = resource.getMembers();
        if (CollectionUtils.isNotEmpty(groupMembers)) {
            for (Member groupMember : groupMembers) {
                // Each member must carry the remote SCIM id of an already-mapped user.
                EntityOnRemoteScimId externalId = groupMember.getValue().map(EntityOnRemoteScimId::new)
                        .orElseThrow(() -> new UnexpectedScimDataException(
                                "can't create group member for group '%s' without id: ".formatted(displayName) + resource));
                KeycloakId userId = getScimResourceDao().findUserByExternalId(externalId)
                        .map(ScimResourceMapping::getIdAsKeycloakId).orElseThrow(() -> new InconsistentScimMappingException(
                                "can't find mapping for group member %s".formatted(externalId)));
                UserModel userModel = getKeycloakDao().getUserById(userId);
                userModel.joinGroup(group);
            }
        }
        return new KeycloakId(group.getId());
    }

    // A group is excluded from SCIM propagation when its "scim-skip" attribute is "true".
    @Override
    protected boolean isMarkedToIgnore(GroupModel groupModel) {
        return BooleanUtils.TRUE.equals(groupModel.getFirstAttribute("scim-skip"));
    }

    @Override
    protected KeycloakId getId(GroupModel groupModel) {
        return new KeycloakId(groupModel.getId());
    }

    /**
     * Builds the SCIM Group creation body from the local group, including its members.
     * Members without a SCIM user mapping are either logged and dropped or abort the request,
     * depending on the skipOrStopStrategy.
     */
    @Override
    protected Group scimRequestBodyForCreate(GroupModel groupModel) throws InconsistentScimMappingException {
        Set<KeycloakId> members = getKeycloakDao().getGroupMembers(groupModel);
        Group group = new Group();
        group.setExternalId(groupModel.getId());
        group.setDisplayName(groupModel.getName());
        for (KeycloakId member : members) {
            Member groupMember = new Member();
            Optional<ScimResourceMapping> optionalGroupMemberMapping = getScimResourceDao().findUserById(member);
            if (optionalGroupMemberMapping.isPresent()) {
                ScimResourceMapping groupMemberMapping = optionalGroupMemberMapping.get();
                EntityOnRemoteScimId externalIdAsEntityOnRemoteScimId = groupMemberMapping
                        .getExternalIdAsEntityOnRemoteScimId();
                groupMember.setValue(externalIdAsEntityOnRemoteScimId.asString());
                URI ref = getUri(ScimResourceType.USER, externalIdAsEntityOnRemoteScimId);
                groupMember.setRef(ref.toString());
                group.addMember(groupMember);
            } else {
                String message = "Unmapped member " + member + " for group " + groupModel.getId();
                if (skipOrStopStrategy.allowMissingMembersWhenPushingGroupToScim(this.getConfiguration())) {
                    LOGGER.warn(message);
                } else {
                    throw new InconsistentScimMappingException(message);
                }
            }
        }
        return group;
    }

    /**
     * Builds the SCIM Group update body: same as creation, plus the remote id and a Meta location.
     */
    @Override
    protected Group scimRequestBodyForUpdate(GroupModel groupModel, EntityOnRemoteScimId externalId)
            throws InconsistentScimMappingException {
        Group group = scimRequestBodyForCreate(groupModel);
        group.setId(externalId.asString());
        Meta meta = newMetaLocation(externalId);
        group.setMeta(meta);
        return group;
    }

    // Groups are never skipped during synchronisation runs (contrast with isMarkedToIgnore for CRUD).
    @Override
    protected boolean shouldIgnoreForScimSynchronization(GroupModel resource) {
        return false;
    }
}
|
|
@ -0,0 +1,76 @@
|
|||
package org.keycloak.federation.scim.core.service;
|
||||
|
||||
import org.keycloak.models.GroupModel;
|
||||
import org.keycloak.models.KeycloakSession;
|
||||
import org.keycloak.models.RealmModel;
|
||||
import org.keycloak.models.UserModel;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
public class KeycloakDao {
|
||||
|
||||
private final KeycloakSession keycloakSession;
|
||||
|
||||
public KeycloakDao(KeycloakSession keycloakSession) {
|
||||
this.keycloakSession = keycloakSession;
|
||||
}
|
||||
|
||||
private KeycloakSession getKeycloakSession() {
|
||||
return keycloakSession;
|
||||
}
|
||||
|
||||
private RealmModel getRealm() {
|
||||
return getKeycloakSession().getContext().getRealm();
|
||||
}
|
||||
|
||||
public boolean groupExists(KeycloakId groupId) {
|
||||
GroupModel group = getKeycloakSession().groups().getGroupById(getRealm(), groupId.asString());
|
||||
return group != null;
|
||||
}
|
||||
|
||||
public boolean userExists(KeycloakId userId) {
|
||||
UserModel user = getUserById(userId);
|
||||
return user != null;
|
||||
}
|
||||
|
||||
public UserModel getUserById(KeycloakId userId) {
|
||||
return getKeycloakSession().users().getUserById(getRealm(), userId.asString());
|
||||
}
|
||||
|
||||
public GroupModel getGroupById(KeycloakId groupId) {
|
||||
return getKeycloakSession().groups().getGroupById(getRealm(), groupId.asString());
|
||||
}
|
||||
|
||||
public Stream<GroupModel> getGroupsStream() {
|
||||
return getKeycloakSession().groups().getGroupsStream(getRealm());
|
||||
}
|
||||
|
||||
public GroupModel createGroup(String displayName) {
|
||||
return getKeycloakSession().groups().createGroup(getRealm(), displayName);
|
||||
}
|
||||
|
||||
public Set<KeycloakId> getGroupMembers(GroupModel groupModel) {
|
||||
return getKeycloakSession().users().getGroupMembersStream(getRealm(), groupModel).map(UserModel::getId)
|
||||
.map(KeycloakId::new).collect(Collectors.toSet());
|
||||
}
|
||||
|
||||
public Stream<UserModel> getUsersStream() {
|
||||
return getKeycloakSession().users().searchForUserStream(getRealm(), Collections.emptyMap());
|
||||
}
|
||||
|
||||
public UserModel getUserByUsername(String username) {
|
||||
return getKeycloakSession().users().getUserByUsername(getRealm(), username);
|
||||
}
|
||||
|
||||
public UserModel getUserByEmail(String email) {
|
||||
return getKeycloakSession().users().getUserByEmail(getRealm(), email);
|
||||
}
|
||||
|
||||
public UserModel addUser(String username) {
|
||||
return getKeycloakSession().users().addUser(getRealm(), username);
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,5 @@
|
|||
package org.keycloak.federation.scim.core.service;
|
||||
|
||||
/**
 * The identifier of a User or Group in the local Keycloak database,
 * as opposed to its {@code EntityOnRemoteScimId} on a remote SCIM endpoint.
 */
public record KeycloakId(String asString) {

}
|
|
@ -0,0 +1,138 @@
|
|||
package org.keycloak.federation.scim.core.service;

import com.google.common.net.HttpHeaders;
import de.captaingoldfish.scim.sdk.client.ScimClientConfig;
import de.captaingoldfish.scim.sdk.client.ScimRequestBuilder;
import de.captaingoldfish.scim.sdk.client.response.ServerResponse;
import de.captaingoldfish.scim.sdk.common.resources.ResourceNode;
import de.captaingoldfish.scim.sdk.common.response.ListResponse;
import io.github.resilience4j.core.IntervalFunction;
import io.github.resilience4j.retry.Retry;
import io.github.resilience4j.retry.RetryConfig;
import io.github.resilience4j.retry.RetryRegistry;
import jakarta.ws.rs.ProcessingException;
import org.jboss.logging.Logger;
import org.keycloak.federation.scim.core.ScrimEndPointConfiguration;
import org.keycloak.federation.scim.core.exceptions.InvalidResponseFromScimEndpointException;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;

/**
 * SCIM client bound to one remote endpoint and one resource type (User or Group).
 * Mutating calls (create/update/delete) are retried with exponential backoff on
 * transport-level failures ({@link ProcessingException}) before giving up.
 */
public class ScimClient<S extends ResourceNode> implements AutoCloseable {
    private static final Logger LOGGER = Logger.getLogger(ScimClient.class);

    private final RetryRegistry retryRegistry;

    private final ScimRequestBuilder scimRequestBuilder;

    private final ScimResourceType scimResourceType;

    // When true, every request and response payload is logged at INFO level.
    private final boolean logAllRequests;

    private ScimClient(ScimRequestBuilder scimRequestBuilder, ScimResourceType scimResourceType, boolean detailedLogs) {
        this.scimRequestBuilder = scimRequestBuilder;
        this.scimResourceType = scimResourceType;
        // Retry only transport failures, up to 10 attempts with exponential backoff.
        RetryConfig retryConfig = RetryConfig.custom().maxAttempts(10).intervalFunction(IntervalFunction.ofExponentialBackoff())
                .retryExceptions(ProcessingException.class).build();
        retryRegistry = RetryRegistry.of(retryConfig);
        this.logAllRequests = detailedLogs;
    }

    /**
     * Opens a client for the given endpoint configuration and resource type.
     *
     * @param scimProviderConfiguration endpoint URL, credentials, content type and logging options
     * @param scimResourceType the resource type this client will manage
     * @return a ready-to-use client; callers are responsible for {@link #close()}
     */
    public static <T extends ResourceNode> ScimClient<T> open(ScrimEndPointConfiguration scimProviderConfiguration,
            ScimResourceType scimResourceType) {
        String scimApplicationBaseUrl = scimProviderConfiguration.getEndPoint();
        Map<String, String> httpHeaders = new HashMap<>();
        httpHeaders.put(HttpHeaders.AUTHORIZATION, scimProviderConfiguration.getAuthorizationHeaderValue());
        httpHeaders.put(HttpHeaders.CONTENT_TYPE, scimProviderConfiguration.getContentType());
        ScimClientConfig scimClientConfig = ScimClientConfig.builder().httpHeaders(httpHeaders).connectTimeout(5)
                .requestTimeout(5).socketTimeout(5).build();
        ScimRequestBuilder scimRequestBuilder = new ScimRequestBuilder(scimApplicationBaseUrl, scimClientConfig);
        return new ScimClient<>(scimRequestBuilder, scimResourceType, scimProviderConfiguration.isLogAllScimRequests());
    }

    /**
     * Creates the given resource on the remote endpoint.
     *
     * @param id the Keycloak-side id (used for logging and retry naming only)
     * @param scimForCreation the resource to create; must not carry a remote id yet
     * @return the id assigned to the resource by the remote server
     * @throws IllegalArgumentException if the resource already has a remote id
     * @throws InvalidResponseFromScimEndpointException on error response, missing id
     *         in the response, or exhausted retries
     */
    public EntityOnRemoteScimId create(KeycloakId id, S scimForCreation) throws InvalidResponseFromScimEndpointException {
        Optional<String> scimForCreationId = scimForCreation.getId();
        if (scimForCreationId.isPresent()) {
            throw new IllegalArgumentException(
                    "User to create should never have an existing id: %s %s".formatted(id, scimForCreationId.get()));
        }
        try {
            Retry retry = retryRegistry.retry("create-%s".formatted(id.asString()));
            if (logAllRequests) {
                LOGGER.info("[SCIM] Sending CREATE " + scimForCreation.toPrettyString() + "\n to " + getScimEndpoint());
            }
            ServerResponse<S> response = retry.executeSupplier(() -> scimRequestBuilder
                    .create(getResourceClass(), getScimEndpoint()).setResource(scimForCreation).sendRequest());
            checkResponseIsSuccess(response);
            S resource = response.getResource();
            return resource.getId().map(EntityOnRemoteScimId::new).orElseThrow(
                    () -> new InvalidResponseFromScimEndpointException(response, "Created SCIM resource does not have id"));

        } catch (Exception e) {
            LOGGER.warn(e);
            throw new InvalidResponseFromScimEndpointException("Exception while retrying create " + e.getMessage(), e);
        }
    }

    /**
     * Verifies that the given server response is a success, logging it first when
     * detailed logging is enabled.
     *
     * @throws InvalidResponseFromScimEndpointException when the response is not a success
     */
    private void checkResponseIsSuccess(ServerResponse<S> response) throws InvalidResponseFromScimEndpointException {
        if (logAllRequests) {
            LOGGER.info("[SCIM] Server response " + response.getHttpStatus() + "\n" + response.getResponseBody());
        }
        if (!response.isSuccess()) {
            // Fix: report the HTTP status first, then the body. The previous message
            // concatenated the response body twice and never included the status.
            throw new InvalidResponseFromScimEndpointException(response,
                    "Server answered with status " + response.getHttpStatus() + ": " + response.getResponseBody());
        }
    }

    private String getScimEndpoint() {
        return scimResourceType.getEndpoint();
    }

    private Class<S> getResourceClass() {
        return scimResourceType.getResourceClass();
    }

    /**
     * Replaces the remote resource identified by externalId with the given representation.
     *
     * @param externalId id of the resource on the remote SCIM server
     * @param scimForReplace the full replacement representation
     * @throws InvalidResponseFromScimEndpointException on error response or exhausted retries
     */
    public void update(EntityOnRemoteScimId externalId, S scimForReplace) throws InvalidResponseFromScimEndpointException {
        Retry retry = retryRegistry.retry("replace-%s".formatted(externalId.asString()));
        try {
            if (logAllRequests) {
                LOGGER.info("[SCIM] Sending UPDATE " + scimForReplace.toPrettyString() + "\n to " + getScimEndpoint());
            }
            ServerResponse<S> response = retry.executeSupplier(
                    () -> scimRequestBuilder.update(getResourceClass(), getScimEndpoint(), externalId.asString())
                            .setResource(scimForReplace).sendRequest());
            checkResponseIsSuccess(response);
        } catch (Exception e) {
            LOGGER.warn(e);
            throw new InvalidResponseFromScimEndpointException("Exception while retrying update " + e.getMessage(), e);
        }
    }

    /**
     * Deletes the remote resource identified by externalId.
     *
     * @param externalId id of the resource on the remote SCIM server
     * @throws InvalidResponseFromScimEndpointException on error response or exhausted retries
     */
    public void delete(EntityOnRemoteScimId externalId) throws InvalidResponseFromScimEndpointException {
        Retry retry = retryRegistry.retry("delete-%s".formatted(externalId.asString()));
        if (logAllRequests) {
            LOGGER.info("[SCIM] Sending DELETE to " + getScimEndpoint());
        }
        try {
            ServerResponse<S> response = retry.executeSupplier(() -> scimRequestBuilder
                    .delete(getResourceClass(), getScimEndpoint(), externalId.asString()).sendRequest());
            checkResponseIsSuccess(response);
        } catch (Exception e) {
            LOGGER.warn(e);
            throw new InvalidResponseFromScimEndpointException("Exception while retrying delete " + e.getMessage(), e);
        }
    }

    @Override
    public void close() {
        scimRequestBuilder.close();
    }

    /**
     * Lists all resources of this client's type on the remote endpoint.
     * Note: performed without retry, unlike the mutating operations.
     *
     * @return the resources reported by the remote server
     */
    public List<S> listResources() {
        ServerResponse<ListResponse<S>> response = scimRequestBuilder.list(getResourceClass(), getScimEndpoint()).get()
                .sendRequest();
        ListResponse<S> resourceTypeListResponse = response.getResource();
        return resourceTypeListResponse.getListedResources();
    }
}
|
|
@ -0,0 +1,29 @@
|
|||
package org.keycloak.federation.scim.core.service;
|
||||
|
||||
import de.captaingoldfish.scim.sdk.common.resources.Group;
|
||||
import de.captaingoldfish.scim.sdk.common.resources.ResourceNode;
|
||||
import de.captaingoldfish.scim.sdk.common.resources.User;
|
||||
|
||||
public enum ScimResourceType {
|
||||
|
||||
USER("/Users", User.class),
|
||||
|
||||
GROUP("/Groups", Group.class);
|
||||
|
||||
private final String endpoint;
|
||||
|
||||
private final Class<? extends ResourceNode> resourceClass;
|
||||
|
||||
ScimResourceType(String endpoint, Class<? extends ResourceNode> resourceClass) {
|
||||
this.endpoint = endpoint;
|
||||
this.resourceClass = resourceClass;
|
||||
}
|
||||
|
||||
public String getEndpoint() {
|
||||
return endpoint;
|
||||
}
|
||||
|
||||
public <T extends ResourceNode> Class<T> getResourceClass() {
|
||||
return (Class<T>) resourceClass;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,130 @@
|
|||
package org.keycloak.federation.scim.core.service;

import de.captaingoldfish.scim.sdk.common.resources.User;
import de.captaingoldfish.scim.sdk.common.resources.complex.Meta;
import de.captaingoldfish.scim.sdk.common.resources.complex.Name;
import de.captaingoldfish.scim.sdk.common.resources.multicomplex.Email;
import de.captaingoldfish.scim.sdk.common.resources.multicomplex.MultiComplexNode;
import de.captaingoldfish.scim.sdk.common.resources.multicomplex.PersonRole;
import org.apache.commons.lang3.BooleanUtils;
import org.apache.commons.lang3.StringUtils;
import org.jboss.logging.Logger;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.RoleMapperModel;
import org.keycloak.models.RoleModel;
import org.keycloak.models.UserModel;
import org.keycloak.federation.scim.core.ScrimEndPointConfiguration;
import org.keycloak.federation.scim.core.exceptions.InconsistentScimMappingException;
import org.keycloak.federation.scim.core.exceptions.SkipOrStopStrategy;
import org.keycloak.federation.scim.core.exceptions.UnexpectedScimDataException;

import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.stream.Stream;

/**
 * SCIM synchronisation service for Keycloak users: maps UserModel instances to
 * SCIM User resources and back, and decides which users take part in synchronisation.
 */
public class UserScimService extends AbstractScimService<UserModel, User> {
    private static final Logger LOGGER = Logger.getLogger(UserScimService.class);

    public UserScimService(KeycloakSession keycloakSession, ScrimEndPointConfiguration scimProviderConfiguration,
            SkipOrStopStrategy skipOrStopStrategy) {
        super(keycloakSession, scimProviderConfiguration, ScimResourceType.USER, skipOrStopStrategy);
    }

    @Override
    protected Stream<UserModel> getResourceStream() {
        return getKeycloakDao().getUsersStream();
    }

    @Override
    protected boolean entityExists(KeycloakId keycloakId) {
        return getKeycloakDao().userExists(keycloakId);
    }

    /**
     * Tries to match a remote SCIM user to a local Keycloak user by username, then
     * by first e-mail address.
     *
     * @throws InconsistentScimMappingException when username and e-mail resolve to
     *         two different local users
     */
    @Override
    protected Optional<KeycloakId> matchKeycloakMappingByScimProperties(User resource) throws InconsistentScimMappingException {
        Optional<KeycloakId> matchedByUsername = resource.getUserName().map(getKeycloakDao()::getUserByUsername)
                .map(this::getId);
        Optional<KeycloakId> matchedByEmail = resource.getEmails().stream().findFirst().flatMap(MultiComplexNode::getValue)
                .map(getKeycloakDao()::getUserByEmail).map(this::getId);
        if (matchedByUsername.isPresent() && matchedByEmail.isPresent() && !matchedByUsername.equals(matchedByEmail)) {
            String inconstencyErrorMessage = "Found 2 possible users for remote user " + matchedByUsername.get() + " - "
                    + matchedByEmail.get();
            LOGGER.warn(inconstencyErrorMessage);
            throw new InconsistentScimMappingException(inconstencyErrorMessage);
        }
        if (matchedByUsername.isPresent()) {
            return matchedByUsername;
        }
        return matchedByEmail;
    }

    /**
     * Creates a local Keycloak user from a remote SCIM user.
     *
     * @throws UnexpectedScimDataException when the remote user has no usable username
     */
    @Override
    protected KeycloakId createEntity(User resource) throws UnexpectedScimDataException {
        String username = resource.getUserName().filter(StringUtils::isNotBlank)
                .orElseThrow(() -> new UnexpectedScimDataException(
                        "Remote Scim user has empty username, can't create. Resource id = %s".formatted(resource.getId())));
        UserModel user = getKeycloakDao().addUser(username);
        resource.getEmails().stream().findFirst().flatMap(MultiComplexNode::getValue).ifPresent(user::setEmail);
        boolean userEnabled = resource.isActive().orElse(false);
        user.setEnabled(userEnabled);
        return new KeycloakId(user.getId());
    }

    @Override
    protected boolean isMarkedToIgnore(UserModel userModel) {
        return BooleanUtils.TRUE.equals(userModel.getFirstAttribute("scim-skip"));
    }

    @Override
    protected KeycloakId getId(UserModel userModel) {
        return new KeycloakId(userModel.getId());
    }

    /**
     * Builds the SCIM User representation sent on creation: display name, roles
     * flagged for SCIM propagation, username, structured name, e-mail and status.
     */
    @Override
    protected User scimRequestBodyForCreate(UserModel roleMapperModel) {
        String firstAndLastName = String.format("%s %s", StringUtils.defaultString(roleMapperModel.getFirstName()),
                StringUtils.defaultString(roleMapperModel.getLastName())).trim();
        // Fix: fall back to the username when both first and last name are blank.
        // firstAndLastName is never null (String.format result), so the previous
        // Objects.toString(firstAndLastName, username) fallback could never trigger
        // and blank names produced an empty displayName.
        String displayName = firstAndLastName.isEmpty() ? roleMapperModel.getUsername() : firstAndLastName;
        // Propagate only roles explicitly flagged with the "scim" attribute,
        // whether assigned directly or through group membership.
        Stream<RoleModel> groupRoleModels = roleMapperModel.getGroupsStream().flatMap(RoleMapperModel::getRoleMappingsStream);
        Stream<RoleModel> roleModels = roleMapperModel.getRoleMappingsStream();
        Stream<RoleModel> allRoleModels = Stream.concat(groupRoleModels, roleModels);
        List<PersonRole> roles = allRoleModels.filter(r -> BooleanUtils.TRUE.equals(r.getFirstAttribute("scim")))
                .map(RoleModel::getName).map(roleName -> {
                    PersonRole personRole = new PersonRole();
                    personRole.setValue(roleName);
                    return personRole;
                }).toList();
        User user = new User();
        user.setRoles(roles);
        user.setExternalId(roleMapperModel.getId());
        user.setUserName(roleMapperModel.getUsername());
        user.setDisplayName(displayName);
        Name name = new Name();
        name.setFamilyName(roleMapperModel.getLastName());
        name.setGivenName(roleMapperModel.getFirstName());
        user.setName(name);
        List<Email> emails = new ArrayList<>();
        if (roleMapperModel.getEmail() != null) {
            emails.add(Email.builder().value(roleMapperModel.getEmail()).build());
        }
        user.setEmails(emails);
        user.setActive(roleMapperModel.isEnabled());
        return user;
    }

    /**
     * Same body as creation, plus the remote id and Meta location required by
     * SCIM replace semantics.
     */
    @Override
    protected User scimRequestBodyForUpdate(UserModel userModel, EntityOnRemoteScimId externalId) {
        User user = scimRequestBodyForCreate(userModel);
        user.setId(externalId.asString());
        Meta meta = newMetaLocation(externalId);
        user.setMeta(meta);
        return user;
    }

    /** The built-in admin account is never synchronised. */
    @Override
    protected boolean shouldIgnoreForScimSynchronization(UserModel userModel) {
        return "admin".equals(userModel.getUsername());
    }
}
|
|
@ -0,0 +1,72 @@
|
|||
package org.keycloak.federation.scim.event;
|
||||
|
||||
import org.jboss.logging.Logger;
|
||||
import org.keycloak.models.GroupModel;
|
||||
import org.keycloak.models.KeycloakSession;
|
||||
import org.keycloak.models.KeycloakSessionFactory;
|
||||
import org.keycloak.models.RealmModel;
|
||||
import org.keycloak.models.utils.KeycloakModelUtils;
|
||||
import org.keycloak.timer.TimerProvider;
|
||||
import org.keycloak.federation.scim.core.ScimDispatcher;
|
||||
|
||||
import java.time.Duration;
|
||||
|
||||
/**
|
||||
* In charge of making background checks and sent UPDATE requests from group for which membership information has changed.
|
||||
* <p>
|
||||
* This is required to avoid immediate group membership updates which could cause to incorrect group members list in case of
|
||||
* concurrent group membership changes.
|
||||
*/
|
||||
public class ScimBackgroundGroupMembershipUpdater {
|
||||
public static final String GROUP_DIRTY_SINCE_ATTRIBUTE_NAME = "scim-dirty-since";
|
||||
|
||||
private static final Logger LOGGER = Logger.getLogger(ScimBackgroundGroupMembershipUpdater.class);
|
||||
// Update check loop will run every time this delay has passed
|
||||
private static final long UPDATE_CHECK_DELAY_MS = 2000;
|
||||
// If a group is marked dirty since less that this debounce delay, wait for the next update check loop
|
||||
private static final long DEBOUNCE_DELAY_MS = 1200;
|
||||
private final KeycloakSessionFactory sessionFactory;
|
||||
|
||||
public ScimBackgroundGroupMembershipUpdater(KeycloakSessionFactory sessionFactory) {
|
||||
this.sessionFactory = sessionFactory;
|
||||
}
|
||||
|
||||
public void startBackgroundUpdates() {
|
||||
// Every UPDATE_CHECK_DELAY_MS, check for dirty groups and send updates if required
|
||||
try (KeycloakSession keycloakSession = sessionFactory.create()) {
|
||||
TimerProvider timer = keycloakSession.getProvider(TimerProvider.class);
|
||||
timer.scheduleTask(taskSession -> {
|
||||
for (RealmModel realm : taskSession.realms().getRealmsStream().toList()) {
|
||||
dispatchDirtyGroupsUpdates(realm);
|
||||
}
|
||||
}, Duration.ofMillis(UPDATE_CHECK_DELAY_MS).toMillis(), "scim-background");
|
||||
}
|
||||
}
|
||||
|
||||
private void dispatchDirtyGroupsUpdates(RealmModel realm) {
|
||||
KeycloakModelUtils.runJobInTransaction(sessionFactory, session -> {
|
||||
session.getContext().setRealm(realm);
|
||||
ScimDispatcher dispatcher = new ScimDispatcher(session);
|
||||
// Identify groups marked as dirty by the ScimEventListenerProvider
|
||||
for (GroupModel group : session.groups().getGroupsStream(realm).filter(this::isDirtyGroup).toList()) {
|
||||
LOGGER.infof("[SCIM] Group %s is dirty, dispatch an update", group.getName());
|
||||
// If dirty : dispatch a group update to all clients and mark it clean
|
||||
dispatcher.dispatchGroupModificationToAll(client -> client.update(group));
|
||||
group.removeAttribute(GROUP_DIRTY_SINCE_ATTRIBUTE_NAME);
|
||||
}
|
||||
dispatcher.close();
|
||||
});
|
||||
}
|
||||
|
||||
private boolean isDirtyGroup(GroupModel g) {
|
||||
String groupDirtySinceAttribute = g.getFirstAttribute(GROUP_DIRTY_SINCE_ATTRIBUTE_NAME);
|
||||
try {
|
||||
long groupDirtySince = Long.parseLong(groupDirtySinceAttribute);
|
||||
// Must be dirty for more than DEBOUNCE_DELAY_MS
|
||||
// (otherwise update will be dispatched in next scheduled loop)
|
||||
return System.currentTimeMillis() - groupDirtySince > DEBOUNCE_DELAY_MS;
|
||||
} catch (NumberFormatException e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,242 @@
|
|||
package org.keycloak.federation.scim.event;

import org.jboss.logging.Logger;
import org.keycloak.common.Profile;
import org.keycloak.events.Event;
import org.keycloak.events.EventListenerProvider;
import org.keycloak.events.EventType;
import org.keycloak.events.admin.AdminEvent;
import org.keycloak.events.admin.OperationType;
import org.keycloak.events.admin.ResourceType;
import org.keycloak.models.GroupModel;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.UserModel;
import org.keycloak.federation.scim.core.ScimDispatcher;
import org.keycloak.federation.scim.core.ScimEndpointConfigurationStorageProviderFactory;
import org.keycloak.federation.scim.core.service.KeycloakDao;
import org.keycloak.federation.scim.core.service.KeycloakId;
import org.keycloak.federation.scim.core.service.ScimResourceType;

import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * An Event listener reacting to Keycloak model modifications (e.g. User creation, Group deletion,
 * membership modifications, endpoint configuration change...) by propagating them to all registered
 * SCIM endpoints.
 */
public class ScimEventListenerProvider implements EventListenerProvider {

    private static final Logger LOGGER = Logger.getLogger(ScimEventListenerProvider.class);

    private final ScimDispatcher dispatcher;

    private final KeycloakSession session;

    private final KeycloakDao keycloakDao;

    // Resource-path patterns of the admin events this listener reacts to; the
    // capture groups extract the entity ids from the path.
    private final Map<ResourceType, Pattern> listenedEventPathPatterns = Map.of(ResourceType.USER,
            Pattern.compile("users/(.+)"), ResourceType.GROUP, Pattern.compile("groups/([\\w-]+)(/children)?"),
            ResourceType.GROUP_MEMBERSHIP, Pattern.compile("users/(.+)/groups/(.+)"), ResourceType.REALM_ROLE_MAPPING,
            Pattern.compile("^(.+)/(.+)/role-mappings"), ResourceType.COMPONENT, Pattern.compile("components/(.+)"));

    public ScimEventListenerProvider(KeycloakSession session) {
        this.session = session;
        this.keycloakDao = new KeycloakDao(session);
        this.dispatcher = new ScimDispatcher(session);
    }

    /**
     * Reacts to user-facing events: registration, profile/e-mail update and
     * account deletion are propagated to all SCIM endpoints.
     */
    @Override
    public void onEvent(Event event) {
        if (Profile.isFeatureEnabled(Profile.Feature.SCIM)) {
            // React to User-related event : creation, deletion, update
            EventType eventType = event.getType();
            KeycloakId eventUserId = new KeycloakId(event.getUserId());
            switch (eventType) {
                case REGISTER -> {
                    LOGGER.infof("[SCIM] Propagate User Registration - %s", eventUserId);
                    UserModel user = getUser(eventUserId);
                    dispatcher.dispatchUserModificationToAll(client -> client.create(user));
                }
                case UPDATE_EMAIL, UPDATE_PROFILE -> {
                    LOGGER.infof("[SCIM] Propagate User %s - %s", eventType, eventUserId);
                    UserModel user = getUser(eventUserId);
                    dispatcher.dispatchUserModificationToAll(client -> client.update(user));
                }
                case DELETE_ACCOUNT -> {
                    LOGGER.infof("[SCIM] Propagate User deletion - %s", eventUserId);
                    dispatcher.dispatchUserModificationToAll(client -> client.delete(eventUserId));
                }
                default -> {
                    // No other event has to be propagated to Scim endpoints
                }
            }
        }
    }

    /**
     * Reacts to admin events on users, groups, group memberships, role mappings
     * and SCIM endpoint component configurations.
     */
    @Override
    public void onEvent(AdminEvent event, boolean includeRepresentation) {
        if (Profile.isFeatureEnabled(Profile.Feature.SCIM)) {
            // Step 1: check if event is relevant for propagation through SCIM
            Pattern pattern = listenedEventPathPatterns.get(event.getResourceType());
            if (pattern == null)
                return;
            Matcher matcher = pattern.matcher(event.getResourcePath());
            if (!matcher.find())
                return;

            // Step 2: propagate event (if needed) according to its resource type
            switch (event.getResourceType()) {
                case USER -> {
                    KeycloakId userId = new KeycloakId(matcher.group(1));
                    handleUserEvent(event, userId);
                }
                case GROUP -> {
                    KeycloakId groupId = new KeycloakId(matcher.group(1));
                    handleGroupEvent(event, groupId);
                }
                case GROUP_MEMBERSHIP -> {
                    KeycloakId userId = new KeycloakId(matcher.group(1));
                    KeycloakId groupId = new KeycloakId(matcher.group(2));
                    handleGroupMemberShipEvent(event, userId, groupId);
                }
                case REALM_ROLE_MAPPING -> {
                    String rawResourceType = matcher.group(1);
                    ScimResourceType type = switch (rawResourceType) {
                        case "users" -> ScimResourceType.USER;
                        case "groups" -> ScimResourceType.GROUP;
                        default -> throw new IllegalArgumentException("Unsupported resource type: " + rawResourceType);
                    };
                    KeycloakId id = new KeycloakId(matcher.group(2));
                    handleRoleMappingEvent(event, type, id);
                }
                case COMPONENT -> {
                    String id = matcher.group(1);
                    handleScimEndpointConfigurationEvent(event, id);
                }
                default -> {
                    // No other resource modification has to be propagated to Scim endpoints
                }
            }
        }
    }

    private void handleUserEvent(AdminEvent userEvent, KeycloakId userId) {
        LOGGER.infof("[SCIM] Propagate User %s - %s", userEvent.getOperationType(), userId);
        switch (userEvent.getOperationType()) {
            case CREATE -> {
                UserModel user = getUser(userId);
                dispatcher.dispatchUserModificationToAll(client -> client.create(user));
                // A freshly created user may already belong to groups; refresh those too
                user.getGroupsStream()
                        .forEach(group -> dispatcher.dispatchGroupModificationToAll(client -> client.update(group)));
            }
            case UPDATE -> {
                UserModel user = getUser(userId);
                dispatcher.dispatchUserModificationToAll(client -> client.update(user));
            }
            case DELETE -> dispatcher.dispatchUserModificationToAll(client -> client.delete(userId));
            default -> {
                // ACTION userEvent are not relevant, nothing to do
            }
        }
    }

    /**
     * Propagating the given group-related event to Scim endpoints.
     *
     * @param event the event to propagate
     * @param groupId event target's id
     */
    private void handleGroupEvent(AdminEvent event, KeycloakId groupId) {
        LOGGER.infof("[SCIM] Propagate Group %s - %s", event.getOperationType(), groupId);
        switch (event.getOperationType()) {
            case CREATE -> {
                GroupModel group = getGroup(groupId);
                dispatcher.dispatchGroupModificationToAll(client -> client.create(group));
            }
            case UPDATE -> {
                GroupModel group = getGroup(groupId);
                dispatcher.dispatchGroupModificationToAll(client -> client.update(group));
            }
            case DELETE -> dispatcher.dispatchGroupModificationToAll(client -> client.delete(groupId));
            default -> {
                // ACTION event are not relevant, nothing to do
            }
        }
    }

    private void handleGroupMemberShipEvent(AdminEvent groupMemberShipEvent, KeycloakId userId, KeycloakId groupId) {
        LOGGER.infof("[SCIM] Propagate GroupMemberShip %s - User %s Group %s", groupMemberShipEvent.getOperationType(), userId,
                groupId);
        // Step 1: update USER immediately
        GroupModel group = getGroup(groupId);
        UserModel user = getUser(userId);
        dispatcher.dispatchUserModificationToAll(client -> client.update(user));

        // Step 2: delayed GROUP update :
        // if several users are added to the group simultaneously in different Keycloak sessions,
        // updating the group in the context of the current session may not reflect those other changes.
        // We trigger a delayed update by setting an attribute on the group (that will be handled by
        // ScimBackgroundGroupMembershipUpdater)
        group.setSingleAttribute(ScimBackgroundGroupMembershipUpdater.GROUP_DIRTY_SINCE_ATTRIBUTE_NAME,
                "" + System.currentTimeMillis());
    }

    private void handleRoleMappingEvent(AdminEvent roleMappingEvent, ScimResourceType type, KeycloakId id) {
        LOGGER.infof("[SCIM] Propagate RoleMapping %s - %s %s", roleMappingEvent.getOperationType(), type, id);
        switch (type) {
            case USER -> {
                UserModel user = getUser(id);
                dispatcher.dispatchUserModificationToAll(client -> client.update(user));
            }
            case GROUP -> {
                // Role mappings are rendered on the user resource, so every member
                // of the group must be refreshed
                GroupModel group = getGroup(id);
                session.users().getGroupMembersStream(session.getContext().getRealm(), group)
                        .forEach(user -> dispatcher.dispatchUserModificationToAll(client -> client.update(user)));
            }
            default -> {
                // No other type is relevant for propagation
            }
        }
    }

    private void handleScimEndpointConfigurationEvent(AdminEvent event, String id) {
        // In case of a component deletion
        if (event.getOperationType() == OperationType.DELETE) {
            // Check if it was a Scim endpoint configuration, and forward deletion if so
            // (anyMatch replaces the previous Stream + iterator().hasNext() construct)
            boolean deletedComponentWasScimEndpoint = session.getContext().getRealm().getComponentsStream()
                    .anyMatch(m -> ScimEndpointConfigurationStorageProviderFactory.ID.equals(m.getProviderId())
                            && id.equals(m.getId()));
            if (deletedComponentWasScimEndpoint) {
                LOGGER.infof("[SCIM] SCIM Endpoint configuration DELETE - %s ", id);
                dispatcher.refreshActiveScimEndpoints();
            }
        } else {
            // In case of CREATE or UPDATE, we can directly use the string representation
            // to check if it defines a SCIM endpoint (faster)
            if (event.getRepresentation() != null && event.getRepresentation().contains("\"providerId\":\"scim\"")) {
                // Fix: log the actual operation type — this branch handles UPDATE as
                // well as CREATE, but previously always logged "CREATE".
                LOGGER.infof("[SCIM] SCIM Endpoint configuration %s - %s ", event.getOperationType(), id);
                dispatcher.refreshActiveScimEndpoints();
            }
        }

    }

    private UserModel getUser(KeycloakId id) {
        return keycloakDao.getUserById(id);
    }

    private GroupModel getGroup(KeycloakId id) {
        return keycloakDao.getGroupById(id);
    }

    @Override
    public void close() {
        dispatcher.close();
    }

}
|
|
@ -0,0 +1,36 @@
|
|||
package org.keycloak.federation.scim.event;
|
||||
|
||||
import org.keycloak.Config.Scope;
|
||||
import org.keycloak.events.EventListenerProvider;
|
||||
import org.keycloak.events.EventListenerProviderFactory;
|
||||
import org.keycloak.models.KeycloakSession;
|
||||
import org.keycloak.models.KeycloakSessionFactory;
|
||||
|
||||
public class ScimEventListenerProviderFactory implements EventListenerProviderFactory {
|
||||
|
||||
@Override
|
||||
public EventListenerProvider create(KeycloakSession session) {
|
||||
return new ScimEventListenerProvider(session);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getId() {
|
||||
return "scim";
|
||||
}
|
||||
|
||||
@Override
|
||||
public void init(Scope config) {
|
||||
// Nothing to initialize
|
||||
}
|
||||
|
||||
@Override
|
||||
public void postInit(KeycloakSessionFactory factory) {
|
||||
// Nothing to initialize
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
// Nothing to close
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,88 @@
|
|||
package org.keycloak.federation.scim.jpa;

import jakarta.persistence.EntityManager;
import jakarta.persistence.NoResultException;
import jakarta.persistence.TypedQuery;
import org.keycloak.connections.jpa.JpaConnectionProvider;
import org.keycloak.models.KeycloakSession;
import org.keycloak.federation.scim.core.service.EntityOnRemoteScimId;
import org.keycloak.federation.scim.core.service.KeycloakId;
import org.keycloak.federation.scim.core.service.ScimResourceType;

import java.util.Optional;

/**
 * JPA access layer for the mapping between local Keycloak entities and their
 * remote SCIM counterparts, scoped to one realm and one endpoint component.
 */
public class ScimResourceDao {

    private final String realmId;

    private final String componentId;

    private final EntityManager entityManager;

    private ScimResourceDao(String realmId, String componentId, EntityManager entityManager) {
        this.realmId = realmId;
        this.componentId = componentId;
        this.entityManager = entityManager;
    }

    /**
     * Builds a DAO bound to the realm of the given session and to the given
     * SCIM endpoint component.
     */
    public static ScimResourceDao newInstance(KeycloakSession keycloakSession, String componentId) {
        String currentRealmId = keycloakSession.getContext().getRealm().getId();
        EntityManager em = keycloakSession.getProvider(JpaConnectionProvider.class).getEntityManager();
        return new ScimResourceDao(currentRealmId, componentId, em);
    }

    private EntityManager getEntityManager() {
        return entityManager;
    }

    private String getRealmId() {
        return realmId;
    }

    private String getComponentId() {
        return componentId;
    }

    /**
     * Persists a new mapping between a local entity and its id on the remote
     * SCIM server.
     */
    public void create(KeycloakId id, EntityOnRemoteScimId externalId, ScimResourceType type) {
        ScimResourceMapping mapping = new ScimResourceMapping();
        mapping.setId(id.asString());
        mapping.setExternalId(externalId.asString());
        mapping.setType(type.name());
        mapping.setRealmId(getRealmId());
        mapping.setComponentId(getComponentId());
        getEntityManager().persist(mapping);
    }

    // Binds the shared query parameters (type, realm, component, id) of a named query.
    private TypedQuery<ScimResourceMapping> getScimResourceTypedQuery(String queryName, String id, ScimResourceType type) {
        return getEntityManager().createNamedQuery(queryName, ScimResourceMapping.class)
                .setParameter("type", type.name())
                .setParameter("realmId", getRealmId())
                .setParameter("componentId", getComponentId())
                .setParameter("id", id);
    }

    /**
     * Finds a mapping by the id of the entity on the remote SCIM server.
     *
     * @return the mapping, or empty when none exists
     */
    public Optional<ScimResourceMapping> findByExternalId(EntityOnRemoteScimId externalId, ScimResourceType type) {
        try {
            ScimResourceMapping found = getScimResourceTypedQuery("findByExternalId", externalId.asString(), type)
                    .getSingleResult();
            return Optional.of(found);
        } catch (NoResultException e) {
            return Optional.empty();
        }
    }

    /**
     * Finds a mapping by the local Keycloak entity id.
     *
     * @return the mapping, or empty when none exists
     */
    public Optional<ScimResourceMapping> findById(KeycloakId keycloakId, ScimResourceType type) {
        try {
            ScimResourceMapping found = getScimResourceTypedQuery("findById", keycloakId.asString(), type)
                    .getSingleResult();
            return Optional.of(found);
        } catch (NoResultException e) {
            return Optional.empty();
        }
    }

    /** Convenience lookup of a USER mapping by local id. */
    public Optional<ScimResourceMapping> findUserById(KeycloakId id) {
        return findById(id, ScimResourceType.USER);
    }

    /** Convenience lookup of a USER mapping by remote id. */
    public Optional<ScimResourceMapping> findUserByExternalId(EntityOnRemoteScimId externalId) {
        return findByExternalId(externalId, ScimResourceType.USER);
    }

    /** Removes the given mapping. */
    public void delete(ScimResourceMapping resource) {
        getEntityManager().remove(resource);
    }
}
|
|
@ -0,0 +1,81 @@
|
|||
package org.keycloak.federation.scim.jpa;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.Objects;
|
||||
|
||||
public class ScimResourceId implements Serializable {
|
||||
private String id;
|
||||
private String realmId;
|
||||
private String componentId;
|
||||
private String type;
|
||||
private String externalId;
|
||||
|
||||
public ScimResourceId() {
|
||||
}
|
||||
|
||||
public ScimResourceId(String id, String realmId, String componentId, String type, String externalId) {
|
||||
this.setId(id);
|
||||
this.setRealmId(realmId);
|
||||
this.setComponentId(componentId);
|
||||
this.setType(type);
|
||||
this.setExternalId(externalId);
|
||||
}
|
||||
|
||||
public String getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(String id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public String getRealmId() {
|
||||
return realmId;
|
||||
}
|
||||
|
||||
public void setRealmId(String realmId) {
|
||||
this.realmId = realmId;
|
||||
}
|
||||
|
||||
public String getComponentId() {
|
||||
return componentId;
|
||||
}
|
||||
|
||||
public void setComponentId(String componentId) {
|
||||
this.componentId = componentId;
|
||||
}
|
||||
|
||||
public String getType() {
|
||||
return type;
|
||||
}
|
||||
|
||||
public void setType(String type) {
|
||||
this.type = type;
|
||||
}
|
||||
|
||||
public String getExternalId() {
|
||||
return externalId;
|
||||
}
|
||||
|
||||
public void setExternalId(String externalId) {
|
||||
this.externalId = externalId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (this == other)
|
||||
return true;
|
||||
if (!(other instanceof ScimResourceId o))
|
||||
return false;
|
||||
return (StringUtils.equals(o.id, id) && StringUtils.equals(o.realmId, realmId)
|
||||
&& StringUtils.equals(o.componentId, componentId) && StringUtils.equals(o.type, type)
|
||||
&& StringUtils.equals(o.externalId, externalId));
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(realmId, componentId, type, id, externalId);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,88 @@
|
|||
package org.keycloak.federation.scim.jpa;
|
||||
|
||||
import jakarta.persistence.Column;
|
||||
import jakarta.persistence.Entity;
|
||||
import jakarta.persistence.Id;
|
||||
import jakarta.persistence.IdClass;
|
||||
import jakarta.persistence.NamedQueries;
|
||||
import jakarta.persistence.NamedQuery;
|
||||
import jakarta.persistence.Table;
|
||||
import org.keycloak.federation.scim.core.service.EntityOnRemoteScimId;
|
||||
import org.keycloak.federation.scim.core.service.KeycloakId;
|
||||
|
||||
@Entity
|
||||
@IdClass(ScimResourceId.class)
|
||||
@Table(name = "SCIM_RESOURCE_MAPPING")
|
||||
@NamedQueries({
|
||||
@NamedQuery(name = "findById", query = "from ScimResourceMapping where realmId = :realmId and componentId = :componentId and type = :type and id = :id"),
|
||||
@NamedQuery(name = "findByExternalId", query = "from ScimResourceMapping where realmId = :realmId and componentId = :componentId and type = :type and externalId = :id") })
|
||||
public class ScimResourceMapping {
|
||||
|
||||
@Id
|
||||
@Column(name = "ID", nullable = false)
|
||||
private String id;
|
||||
|
||||
@Id
|
||||
@Column(name = "REALM_ID", nullable = false)
|
||||
private String realmId;
|
||||
|
||||
@Id
|
||||
@Column(name = "COMPONENT_ID", nullable = false)
|
||||
private String componentId;
|
||||
|
||||
@Id
|
||||
@Column(name = "TYPE", nullable = false)
|
||||
private String type;
|
||||
|
||||
@Id
|
||||
@Column(name = "EXTERNAL_ID", nullable = false)
|
||||
private String externalId;
|
||||
|
||||
public String getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(String id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public String getRealmId() {
|
||||
return realmId;
|
||||
}
|
||||
|
||||
public void setRealmId(String realmId) {
|
||||
this.realmId = realmId;
|
||||
}
|
||||
|
||||
public String getComponentId() {
|
||||
return componentId;
|
||||
}
|
||||
|
||||
public void setComponentId(String componentId) {
|
||||
this.componentId = componentId;
|
||||
}
|
||||
|
||||
public String getExternalId() {
|
||||
return externalId;
|
||||
}
|
||||
|
||||
public void setExternalId(String externalId) {
|
||||
this.externalId = externalId;
|
||||
}
|
||||
|
||||
public String getType() {
|
||||
return type;
|
||||
}
|
||||
|
||||
public void setType(String type) {
|
||||
this.type = type;
|
||||
}
|
||||
|
||||
public KeycloakId getIdAsKeycloakId() {
|
||||
return new KeycloakId(id);
|
||||
}
|
||||
|
||||
public EntityOnRemoteScimId getExternalIdAsEntityOnRemoteScimId() {
|
||||
return new EntityOnRemoteScimId(externalId);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,29 @@
|
|||
package org.keycloak.federation.scim.jpa;
|
||||
|
||||
import org.keycloak.connections.jpa.entityprovider.JpaEntityProvider;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
public class ScimResourceProvider implements JpaEntityProvider {
|
||||
|
||||
@Override
|
||||
public List<Class<?>> getEntities() {
|
||||
return Collections.singletonList(ScimResourceMapping.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getChangelogLocation() {
|
||||
return "META-INF/scim-resource-changelog.xml";
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
// Nothing to close
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getFactoryId() {
|
||||
return ScimResourceProviderFactory.ID;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,38 @@
|
|||
package org.keycloak.federation.scim.jpa;
|
||||
|
||||
import org.keycloak.Config.Scope;
|
||||
import org.keycloak.connections.jpa.entityprovider.JpaEntityProvider;
|
||||
import org.keycloak.connections.jpa.entityprovider.JpaEntityProviderFactory;
|
||||
import org.keycloak.models.KeycloakSession;
|
||||
import org.keycloak.models.KeycloakSessionFactory;
|
||||
|
||||
public class ScimResourceProviderFactory implements JpaEntityProviderFactory {
|
||||
|
||||
static final String ID = "scim-resource";
|
||||
|
||||
@Override
|
||||
public JpaEntityProvider create(KeycloakSession session) {
|
||||
return new ScimResourceProvider();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getId() {
|
||||
return ID;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void init(Scope scope) {
|
||||
// Nothing to initialise
|
||||
}
|
||||
|
||||
@Override
|
||||
public void postInit(KeycloakSessionFactory sessionFactory) {
|
||||
// Nothing to do
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
// Nothing to close
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,10 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<jboss-deployment-structure>
|
||||
<deployment>
|
||||
<dependencies>
|
||||
<module name="org.keycloak.keycloak-services" />
|
||||
<module name="org.keycloak.keycloak-model-jpa" />
|
||||
<module name="org.hibernate" />
|
||||
</dependencies>
|
||||
</deployment>
|
||||
</jboss-deployment-structure>
|
|
@ -0,0 +1,35 @@
|
|||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<databaseChangeLog xmlns="http://www.liquibase.org/xml/ns/dbchangelog"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.1.xsd">
|
||||
<changeSet author="contact@indiehosters.net" id="scim-resource-1.0">
|
||||
|
||||
<createTable tableName="SCIM_RESOURCE_MAPPING">
|
||||
<column name="ID" type="VARCHAR(36)">
|
||||
<constraints nullable="false"/>
|
||||
</column>
|
||||
<column name="REALM_ID" type="VARCHAR(36)">
|
||||
<constraints nullable="false"/>
|
||||
</column>
|
||||
<column name="TYPE" type="VARCHAR(36)">
|
||||
<constraints nullable="false"/>
|
||||
</column>
|
||||
<column name="COMPONENT_ID" type="VARCHAR(36)">
|
||||
<constraints nullable="false"/>
|
||||
</column>
|
||||
<column name="EXTERNAL_ID" type="VARCHAR(36)">
|
||||
<constraints nullable="false"/>
|
||||
</column>
|
||||
</createTable>
|
||||
|
||||
<addPrimaryKey constraintName="PK_SCIM_RESOURCE_MAPPING" tableName="SCIM_RESOURCE_MAPPING"
|
||||
columnNames="ID,REALM_ID,TYPE,COMPONENT_ID,EXTERNAL_ID"/>
|
||||
<addForeignKeyConstraint baseTableName="SCIM_RESOURCE_MAPPING" baseColumnNames="REALM_ID"
|
||||
constraintName="FK_SCIM_RESOURCE_MAPPING_REALM" referencedTableName="REALM"
|
||||
referencedColumnNames="ID" onDelete="CASCADE" onUpdate="CASCADE"/>
|
||||
<addForeignKeyConstraint baseTableName="SCIM_RESOURCE_MAPPING" baseColumnNames="COMPONENT_ID"
|
||||
constraintName="FK_SCIM_RESOURCE_MAPPING_COMPONENT" referencedTableName="COMPONENT"
|
||||
referencedColumnNames="ID" onDelete="CASCADE" onUpdate="CASCADE"/>
|
||||
</changeSet>
|
||||
|
||||
</databaseChangeLog>
|
|
@ -0,0 +1 @@
|
|||
org.keycloak.federation.scim.jpa.ScimResourceProviderFactory
|
|
@ -0,0 +1 @@
|
|||
org.keycloak.federation.scim.event.ScimEventListenerProviderFactory
|
|
@ -0,0 +1 @@
|
|||
org.keycloak.federation.scim.core.ScimEndpointConfigurationStorageProviderFactory
|
|
@ -5,7 +5,7 @@
|
|||
<base href="${resourceUrl}/">
|
||||
<link rel="icon" type="${properties.favIconType!'image/svg+xml'}" href="${resourceUrl}${properties.favIcon!'/favicon.svg'}">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<meta name="color-scheme" content="light${(properties.darkMode)?boolean?then(' dark', '')}">
|
||||
<meta name="color-scheme" content="light${darkMode?then(' dark', '')}">
|
||||
<meta name="description" content="${properties.description!'The Account Console is a web-based interface for managing your account.'}">
|
||||
<title>${properties.title!'Account Management'}</title>
|
||||
<style>
|
||||
|
@ -58,7 +58,7 @@
|
|||
}
|
||||
}
|
||||
</script>
|
||||
<#if properties.darkMode?boolean>
|
||||
<#if darkMode>
|
||||
<script type="module" async blocking="render">
|
||||
const DARK_MODE_CLASS = "${properties.kcDarkModeClass}";
|
||||
const mediaQuery = window.matchMedia("(prefers-color-scheme: dark)");
|
||||
|
|
|
@ -75,7 +75,6 @@ linkedAccounts=Linked accounts
|
|||
personalInfoDescription=Manage your basic information
|
||||
removeAccess=Remove access
|
||||
signingInDescription=Configure ways to sign in.
|
||||
somethingWentWrongDescription=Sorry, an unexpected error has occurred.
|
||||
personalInfo=Personal info
|
||||
removeCred=Remove {{name}}
|
||||
signOutAllDevices=Sign out all devices
|
||||
|
@ -98,10 +97,11 @@ permissionRequest=Permission requests - {{name}}
|
|||
add=Add
|
||||
error-invalid-value='{{0}}' has invalid value.
|
||||
somethingWentWrong=Something went wrong
|
||||
somethingWentWrongDescription=Sorry, an unexpected error has occurred.
|
||||
tryAgain=Try again
|
||||
rolesScope=If there is no role scope mapping defined, each user is permitted to use this client scope. If there are role scope mappings defined, the user must be a member of at least one of the roles.
|
||||
unShareError=Could not un-share the resource due to\: {{error}}
|
||||
ipAddress=IP address
|
||||
tryAgain=Try again
|
||||
resourceName=Resource name
|
||||
unlinkedEmpty=No unlinked providers
|
||||
done=Done
|
||||
|
@ -213,4 +213,5 @@ emptyUserOrganizationsInstructions=You have not joined any organizations yet.
|
|||
searchOrganization=Search for organization
|
||||
organizationList=List of organizations
|
||||
domains=Domains
|
||||
refresh=Refresh
|
||||
refresh=Refresh
|
||||
termsAndConditionsDeclined=You need to accept the Terms and Conditions to continue
|
|
@ -28,7 +28,7 @@
|
|||
"@patternfly/patternfly": "^5.4.1",
|
||||
"@patternfly/react-core": "^5.4.8",
|
||||
"@patternfly/react-icons": "^5.4.2",
|
||||
"@patternfly/react-table": "^5.4.8",
|
||||
"@patternfly/react-table": "^5.4.9",
|
||||
"i18next": "^23.16.4",
|
||||
"i18next-http-backend": "^2.6.2",
|
||||
"keycloak-js": "workspace:*",
|
||||
|
@ -41,12 +41,12 @@
|
|||
},
|
||||
"devDependencies": {
|
||||
"@keycloak/keycloak-admin-client": "workspace:*",
|
||||
"@playwright/test": "^1.48.1",
|
||||
"@playwright/test": "^1.48.2",
|
||||
"@types/lodash-es": "^4.17.12",
|
||||
"@types/react": "^18.3.12",
|
||||
"@types/react-dom": "^18.3.1",
|
||||
"@vitejs/plugin-react-swc": "^3.7.1",
|
||||
"lightningcss": "^1.27.0",
|
||||
"lightningcss": "^1.28.1",
|
||||
"vite": "^5.4.10",
|
||||
"vite-plugin-checker": "^0.8.0",
|
||||
"vite-plugin-dts": "^4.3.0"
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
export { PersonalInfo } from "./personal-info/PersonalInfo";
|
||||
export { ErrorPage } from "./root/ErrorPage";
|
||||
export { Header } from "./root/Header";
|
||||
export { PageNav } from "./root/PageNav";
|
||||
export { DeviceActivity } from "./account-security/DeviceActivity";
|
||||
|
|
|
@ -1,65 +0,0 @@
|
|||
import {
|
||||
Button,
|
||||
Modal,
|
||||
ModalVariant,
|
||||
Page,
|
||||
Text,
|
||||
TextContent,
|
||||
TextVariants,
|
||||
} from "@patternfly/react-core";
|
||||
import { useTranslation } from "react-i18next";
|
||||
import { isRouteErrorResponse, useRouteError } from "react-router-dom";
|
||||
|
||||
type ErrorPageProps = {
|
||||
error?: unknown;
|
||||
};
|
||||
|
||||
export const ErrorPage = (props: ErrorPageProps) => {
|
||||
const { t } = useTranslation();
|
||||
const error = useRouteError() ?? props.error;
|
||||
const errorMessage = getErrorMessage(error);
|
||||
|
||||
function onRetry() {
|
||||
location.href = location.origin + location.pathname;
|
||||
}
|
||||
|
||||
return (
|
||||
<Page>
|
||||
<Modal
|
||||
variant={ModalVariant.small}
|
||||
title={t("somethingWentWrong")}
|
||||
titleIconVariant="danger"
|
||||
showClose={false}
|
||||
isOpen
|
||||
actions={[
|
||||
<Button key="tryAgain" variant="primary" onClick={onRetry}>
|
||||
{t("tryAgain")}
|
||||
</Button>,
|
||||
]}
|
||||
>
|
||||
<TextContent>
|
||||
<Text>{t("somethingWentWrongDescription")}</Text>
|
||||
{errorMessage && (
|
||||
<Text component={TextVariants.small}>{errorMessage}</Text>
|
||||
)}
|
||||
</TextContent>
|
||||
</Modal>
|
||||
</Page>
|
||||
);
|
||||
};
|
||||
|
||||
function getErrorMessage(error: unknown): string | null {
|
||||
if (typeof error === "string") {
|
||||
return error;
|
||||
}
|
||||
|
||||
if (isRouteErrorResponse(error)) {
|
||||
return error.statusText;
|
||||
}
|
||||
|
||||
if (error instanceof Error) {
|
||||
return error.message;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
|
@ -1,10 +1,9 @@
|
|||
import { lazy } from "react";
|
||||
import type { IndexRouteObject, RouteObject } from "react-router-dom";
|
||||
|
||||
import { environment } from "./environment";
|
||||
import { Organizations } from "./organizations/Organizations";
|
||||
import { ErrorPage } from "./root/ErrorPage";
|
||||
import { Root } from "./root/Root";
|
||||
import { ErrorPage } from "@keycloak/keycloak-ui-shared";
|
||||
|
||||
const DeviceActivity = lazy(() => import("./account-security/DeviceActivity"));
|
||||
const LinkedAccounts = lazy(() => import("./account-security/LinkedAccounts"));
|
||||
|
@ -85,7 +84,7 @@ export const RootRoute: RouteObject = {
|
|||
PersonalInfoRoute,
|
||||
ResourcesRoute,
|
||||
ContentRoute,
|
||||
Oid4VciRoute,
|
||||
...(environment.features.isOid4VciEnabled ? [Oid4VciRoute] : []),
|
||||
],
|
||||
};
|
||||
|
||||
|
|
|
@ -119,7 +119,7 @@ describe("Group test", () => {
|
|||
.assertNoSearchResultsMessageExist(true);
|
||||
});
|
||||
|
||||
it("Duplicate group", () => {
|
||||
it.skip("Duplicate group from item bar", () => {
|
||||
groupPage
|
||||
.duplicateGroupItem(groupNames[0], true)
|
||||
.assertNotificationGroupDuplicated();
|
||||
|
|
|
@ -74,7 +74,7 @@ export default class RoleMappingTab {
|
|||
selectRow(name: string, modal = false) {
|
||||
cy.get(modal ? ".pf-v5-c-modal-box " : "" + this.#namesColumn)
|
||||
.contains(name)
|
||||
.parent()
|
||||
.parents("tr")
|
||||
.within(() => {
|
||||
cy.get("input").click();
|
||||
});
|
||||
|
|
|
@ -21,7 +21,7 @@ export default class AssociatedRolesPage {
|
|||
|
||||
cy.get(this.#addRoleTable)
|
||||
.contains(roleName)
|
||||
.parent()
|
||||
.parents("tr")
|
||||
.within(() => {
|
||||
cy.get("input").click();
|
||||
});
|
||||
|
@ -49,7 +49,7 @@ export default class AssociatedRolesPage {
|
|||
|
||||
cy.get(this.#addRoleTable)
|
||||
.contains(roleName)
|
||||
.parent()
|
||||
.parents("tr")
|
||||
.within(() => {
|
||||
cy.get("input").click();
|
||||
});
|
||||
|
@ -67,7 +67,7 @@ export default class AssociatedRolesPage {
|
|||
|
||||
cy.get(this.#addRoleTable)
|
||||
.contains(roleName)
|
||||
.parent()
|
||||
.parents("tr")
|
||||
.within(() => {
|
||||
cy.get("input").click();
|
||||
});
|
||||
|
|
|
@ -24,13 +24,13 @@ export default class RealmSettingsPage extends CommonPage {
|
|||
userProfileTab = "rs-user-profile-tab";
|
||||
tokensTab = "rs-tokens-tab";
|
||||
selectLoginTheme = "#kc-login-theme";
|
||||
loginThemeList = "[data-testid='select-login-theme']";
|
||||
loginThemeList = "[data-testid='select-loginTheme']";
|
||||
selectAccountTheme = "#kc-account-theme";
|
||||
accountThemeList = "[data-testid='select-account-theme']";
|
||||
accountThemeList = "[data-testid='select-accountTheme']";
|
||||
selectAdminTheme = "#kc-admin-ui-theme";
|
||||
adminThemeList = "[data-testid='select-admin-theme']";
|
||||
adminThemeList = "[data-testid='select-adminTheme']";
|
||||
selectEmailTheme = "#kc-email-theme";
|
||||
emailThemeList = "[data-testid='select-email-theme']";
|
||||
emailThemeList = "[data-testid='select-emailTheme']";
|
||||
ssoSessionIdleSelectMenu = "#kc-sso-session-idle-select-menu";
|
||||
ssoSessionIdleSelectMenuList = "#kc-sso-session-idle-select-menu ul";
|
||||
ssoSessionMaxSelectMenu = "#kc-sso-session-max-select-menu";
|
||||
|
|
|
@ -40,7 +40,7 @@ export default class UserRegistration {
|
|||
selectRow(name: string) {
|
||||
cy.get(this.#namesColumn)
|
||||
.contains(name)
|
||||
.parent()
|
||||
.parents("tr")
|
||||
.within(() => {
|
||||
cy.get("input").click();
|
||||
});
|
||||
|
|
|
@ -591,7 +591,6 @@ hour=時
|
|||
connectionTimeoutHelp=LDAP接続タイムアウト(ミリ秒単位)
|
||||
defaultSigAlgHelp=このレルムでトークンの署名に使用されるデフォルトのアルゴリズム
|
||||
save-admin-eventsHelp=有効の場合は、管理イベントがデータベースに保存され、管理コンソールで使用可能になります。
|
||||
policyGroups=どのユーザーがこのポリシーで許可されるか指定してください。
|
||||
forwardParametersHelp=最初のアプリケーションへのリクエストから取得し、外部IDPの認可エンドポイントへ転送されるOpenID Connect/OAuth標準以外のクエリー・パラメーター。複数のパラメーターをカンマ(,)で区切って入力できます。
|
||||
on=オン
|
||||
webAuthnPolicyRpId=リライング・パーティー・エンティティーID
|
||||
|
@ -804,3 +803,4 @@ resourceNameHelp=このリソースの一意な名前。この名前は、リソ
|
|||
duplicateEmailsAllowed=メールの重複
|
||||
policyClientHelp=このポリシーで許可されるクライアントを指定します。
|
||||
clientAuthenticatorTypeHelp=Keycloakサーバーに対してこのクライアントの認証に使用するクライアント認証方式を設定します。
|
||||
policyGroupsHelp=どのユーザーがこのポリシーで許可されるか指定してください。
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<base href="${resourceUrl}/">
|
||||
<link rel="icon" type="${properties.favIconType!'image/svg+xml'}" href="${resourceUrl}${properties.favIcon!'/favicon.svg'}">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<meta name="color-scheme" content="light${(properties.darkMode)?boolean?then(' dark', '')}">
|
||||
<meta name="color-scheme" content="light${darkMode?then(' dark', '')}">
|
||||
<meta name="description" content="${properties.description!'The Keycloak Administration Console is a web-based interface for managing Keycloak.'}">
|
||||
<title>${properties.title!'Keycloak Administration Console'}</title>
|
||||
<style>
|
||||
|
@ -15,6 +15,8 @@
|
|||
|
||||
body, #app {
|
||||
height: 100%;
|
||||
overflow-x: hidden;
|
||||
overflow-y: hidden;
|
||||
}
|
||||
|
||||
.container {
|
||||
|
@ -58,7 +60,7 @@
|
|||
}
|
||||
}
|
||||
</script>
|
||||
<#if properties.darkMode?boolean>
|
||||
<#if darkMode>
|
||||
<script type="module" async blocking="render">
|
||||
const DARK_MODE_CLASS = "${properties.kcDarkModeClass}";
|
||||
const mediaQuery = window.matchMedia("(prefers-color-scheme: dark)");
|
||||
|
|
|
@ -231,7 +231,7 @@ eventTypes.USER_DISABLED_BY_TEMPORARY_LOCKOUT_ERROR.name=User disabled by tempor
|
|||
deleteUser=Delete user
|
||||
addedNodeSuccess=Node successfully added
|
||||
eventTypes.INTROSPECT_TOKEN_ERROR.description=Introspect token error
|
||||
webAuthnPolicyUserVerificationRequirementHelp=Communicates to an authenticator to confirm actually verifying a user.
|
||||
webAuthnPolicyUserVerificationRequirementHelp=Communicates to an authenticator whether to require to verify a user.
|
||||
syncModes.import=Import
|
||||
realmSaveError=Realm could not be updated\: {{error}}
|
||||
authDataDescription=Represents a token carrying authorization data as a result of the processing of an authorization request. This representation is basically what Keycloak issues to clients asking for permission. Check the `authorization` claim for the permissions that where granted based on the current authorization request.
|
||||
|
@ -418,7 +418,7 @@ x509CertificateHelp=X509 Certificate encoded in PEM format
|
|||
samlEndpointsLabel=SAML 2.0 Service Provider Metadata
|
||||
passCurrentLocaleHelp=Pass the current locale to the identity provider as a ui_locales parameter.
|
||||
lessThan=Must be less than {{value}}
|
||||
webAuthnPolicyRequireResidentKeyHelp=It tells an authenticator create a public key credential as Discoverable Credential or not.
|
||||
webAuthnPolicyRequireResidentKeyHelp=It tells an authenticator whether to create a public key credential as a Discoverable Credential.
|
||||
logoutServiceRedirectBindingURL=Logout Service Redirect Binding URL
|
||||
createIdentityProviderSuccess=Identity provider successfully created
|
||||
emptyMappersInstructions=If you want to add mappers, please click the button below to add some predefined mappers or to configure a new mapper.
|
||||
|
@ -689,7 +689,7 @@ clientPolicySearch=Search client policy
|
|||
refreshTokens=Refresh tokens
|
||||
eventTypes.UPDATE_EMAIL_ERROR.description=Update email error
|
||||
credentials=Credentials
|
||||
webAuthnPolicyCreateTimeoutHelp=Timeout value for creating user's public key credential in seconds. if set to 0, this timeout option is not adapted.
|
||||
webAuthnPolicyCreateTimeoutHelp=The timeout value for creating the user's public key credential in seconds. If set to 0, this timeout option is not adapted.
|
||||
policyType.hotp=Counter based
|
||||
claimFilterValue=Essential claim value
|
||||
eventTypes.REGISTER_ERROR.name=Register error
|
||||
|
@ -1250,7 +1250,7 @@ realmRoles=Realm roles
|
|||
fineGrainOpenIdConnectConfigurationHelp=This section is used to configure advanced settings of this client related to OpenID Connect protocol.
|
||||
searchForUserDescription=This realm may have a federated provider. Viewing all users may cause the system to slow down, but it can be done by searching for "*". Please search for a user above.
|
||||
expirationHelp=Sets the expiration for events. Expired events are periodically deleted from the database.
|
||||
webAuthnPolicySignatureAlgorithmsHelp=What signature algorithms should be used for Authentication Assertion.
|
||||
webAuthnPolicySignatureAlgorithmsHelp=The signature algorithms that should be used for the Authentication Assertion.
|
||||
setToNowError=Error\! Failed to set notBefore to current date and time: {{error}}
|
||||
eventTypes.UNREGISTER_NODE_ERROR.description=Unregister node error
|
||||
clientScopeTypes.optional=Optional
|
||||
|
@ -1272,7 +1272,7 @@ revoke=Revoke
|
|||
admin=Admin
|
||||
syncUsersError=Could not sync users\: '{{error}}'
|
||||
generatedAccessTokenHelp=See the example access token, which will be generated and sent to the client when selected user is authenticated. You can see claims and roles that the token will contain based on the effective protocol mappers and role scope mappings and also based on the claims/roles assigned to user himself
|
||||
webAuthnPolicyAcceptableAaguidsHelp=The list of AAGUID of which an authenticator can be registered.
|
||||
webAuthnPolicyAcceptableAaguidsHelp=The list of allowed AAGUIDs of which an authenticator can be registered. An AAGUID is a 128-bit identifier indicating the authenticator's type (e.g., make and model).
|
||||
keyPasswordHelp=Password for the private key
|
||||
frontchannelLogout=Front channel logout
|
||||
clientUpdaterTrustedHostsTooltip=List of Hosts, which are trusted. In case that client registration/update request comes from the host/domain specified in this configuration, condition evaluates to true. You can use hostnames or IP addresses. If you use star at the beginning (for example '*.example.com' ) then whole domain example.com will be trusted.
|
||||
|
@ -1721,7 +1721,7 @@ mappedGroupAttributes=Mapped group attributes
|
|||
localization=Localization
|
||||
importConfig=Import config from file
|
||||
replyToDisplayNameHelp=A user-friendly name for the 'Reply-To' address (optional).
|
||||
webAuthnPolicyRpIdHelp=This is ID as WebAuthn Relying Party. It must be origin's effective domain.
|
||||
webAuthnPolicyRpIdHelp=The WebAuthn Relying Party ID (RpID). It must be the origin's effective domain, e.g. 'company.com' or 'auth.company.com'.
|
||||
signingKeysConfigExplain=If you enable the "Client signature required" below, you must configure the signing keys by generating or importing keys, and the client will sign their saml requests and responses. The signature will be validated.
|
||||
newClientProfile=Create client profile
|
||||
consoleDisplayConnectionUrlHelp=Connection URL to your LDAP server
|
||||
|
@ -2853,7 +2853,7 @@ credentialData=Data
|
|||
clientRolesConditionTooltip=Client roles, which will be checked during this condition evaluation. Condition evaluates to true if client has at least one client role with the name as the client roles specified in the configuration.
|
||||
invalidateSecret=Invalidate
|
||||
emptyPermissionInstructions=If you want to create a permission, please click the button below to create a resource-based or scope-based permission.
|
||||
webAuthnPolicyAvoidSameAuthenticatorRegisterHelp=Avoid registering the authenticator that has already been registered.
|
||||
webAuthnPolicyAvoidSameAuthenticatorRegisterHelp=Avoid registering an authenticator that has already been registered.
|
||||
memberofLdapAttribute=Member-of LDAP attribute
|
||||
supportedLocales=Supported locales
|
||||
showPasswordDataValue=Value
|
||||
|
@ -2936,7 +2936,7 @@ clientSecretHelp=The client secret registered with the identity provider. This f
|
|||
offlineSessionMax=Offline Session Max
|
||||
generatedUserInfoHelp=See the example User Info, which will be provided by the User Info Endpoint
|
||||
dynamicScopeFormat=Dynamic scope format
|
||||
webAuthnPolicyExtraOriginsHelp=The list of extra origin for non-web application.
|
||||
webAuthnPolicyExtraOriginsHelp=The list of extra origins for non-web applications.
|
||||
updatePermissionSuccess=Successfully updated the permission
|
||||
idpLinkSuccess=Identity provider has been linked
|
||||
removeAnnotationText=Remove annotation
|
||||
|
@ -3165,6 +3165,8 @@ logo=Logo
|
|||
avatarImage=Avatar image
|
||||
organizationsEnabled=Organizations
|
||||
organizationsEnabledHelp=If enabled, allows managing organizations. Otherwise, existing organizations are still kept but you will not be able to manage them anymore or authenticate their members.
|
||||
verifiableCredentialsEnabled=Verifiable Credentials
|
||||
verifiableCredentialsEnabledHelp=If enabled, allows managing verifiable credentials in this realm.
|
||||
organizations=Organizations
|
||||
organizationDetails=Organization details
|
||||
organizationsList=Organizations
|
||||
|
@ -3273,7 +3275,24 @@ groupDuplicated=Group duplicated
|
|||
duplicateAGroup=Duplicate group
|
||||
couldNotFetchClientRoleMappings=Could not fetch client role mappings\: {{error}}
|
||||
duplicateGroupWarning=Duplication of groups with a large number of subgroups is not supported. Please ensure that the group you are duplicating does not have a large number of subgroups.
|
||||
darkModeEnabled=Dark mode
|
||||
darkModeEnabledHelp=If enabled the dark variant of the theme will be applied based on user preference through an operating system setting (e.g. light or dark mode) or a user agent setting, if disabled only the light variant will be used. This setting only applies to themes that support dark and light variants, on themes that do not support this feature it will have no effect.
|
||||
showMemberships=Show memberships
|
||||
showMembershipsTitle={{username}} Group Memberships
|
||||
noGroupMembershipsText=This user is not a member of any groups.
|
||||
noGroupMemberships=No memberships
|
||||
termsAndConditionsDeclined=You need to accept the Terms and Conditions to continue
|
||||
somethingWentWrong=Something went wrong
|
||||
somethingWentWrongDescription=Sorry, an unexpected error has occurred.
|
||||
tryAgain=Try again
|
||||
errorSavingTranslations=Error saving translations\: '{{error}}'
|
||||
clearCachesTitle=Clear Caches
|
||||
realmCache=Realm Cache
|
||||
userCache=User Cache
|
||||
keysCache=Keys Cache
|
||||
clearButtonTitle=Clear
|
||||
clearRealmCacheHelp=This will clear entries for all realms.
|
||||
clearUserCacheHelp=This will clear entries for all realms.
|
||||
clearKeysCacheHelp=Clears all entries from the cache of external public keys. These are keys of external clients or identity providers. This will clear all entries for all realms.
|
||||
clearCacheSuccess=Cache cleared successfully
|
||||
clearCacheError=Could not clear cache\: {{error}}
|
||||
|
|
|
@ -74,11 +74,11 @@
|
|||
"@keycloak/keycloak-admin-client": "workspace:*",
|
||||
"@keycloak/keycloak-ui-shared": "workspace:*",
|
||||
"@patternfly/patternfly": "^5.4.1",
|
||||
"@patternfly/react-code-editor": "^5.4.10",
|
||||
"@patternfly/react-code-editor": "^5.4.11",
|
||||
"@patternfly/react-core": "^5.4.8",
|
||||
"@patternfly/react-icons": "^5.4.2",
|
||||
"@patternfly/react-styles": "^5.4.1",
|
||||
"@patternfly/react-table": "^5.4.8",
|
||||
"@patternfly/react-table": "^5.4.9",
|
||||
"admin-ui": "file:",
|
||||
"dagre": "^0.8.5",
|
||||
"file-saver": "^2.0.5",
|
||||
|
@ -101,7 +101,7 @@
|
|||
"@4tw/cypress-drag-drop": "^2.2.5",
|
||||
"@testing-library/cypress": "^10.0.2",
|
||||
"@testing-library/dom": "^10.4.0",
|
||||
"@testing-library/jest-dom": "^6.6.2",
|
||||
"@testing-library/jest-dom": "^6.6.3",
|
||||
"@testing-library/react": "^16.0.1",
|
||||
"@types/dagre": "^0.7.52",
|
||||
"@types/file-saver": "^2.0.7",
|
||||
|
@ -110,12 +110,12 @@
|
|||
"@types/react-dom": "^18.3.1",
|
||||
"@types/uuid": "^10.0.0",
|
||||
"@vitejs/plugin-react-swc": "^3.7.1",
|
||||
"cypress": "^13.15.1",
|
||||
"cypress": "^13.15.2",
|
||||
"cypress-axe": "^1.5.0",
|
||||
"cypress-split": "^1.24.5",
|
||||
"jsdom": "^25.0.1",
|
||||
"ldap-server-mock": "^6.0.1",
|
||||
"lightningcss": "^1.27.0",
|
||||
"lightningcss": "^1.28.1",
|
||||
"ts-node": "^10.9.2",
|
||||
"uuid": "^11.0.2",
|
||||
"vite": "^5.4.10",
|
||||
|
|
|
@ -23,6 +23,9 @@ import { HelpHeader } from "./components/help-enabler/HelpHeader";
|
|||
import { useRealm } from "./context/realm-context/RealmContext";
|
||||
import { useWhoAmI } from "./context/whoami/WhoAmI";
|
||||
import { toDashboard } from "./dashboard/routes/Dashboard";
|
||||
import useToggle from "./utils/useToggle";
|
||||
import { PageHeaderClearCachesModal } from "./PageHeaderClearCachesModal";
|
||||
import { useAccess } from "./context/access/Access";
|
||||
|
||||
const ManageAccountDropdownItem = () => {
|
||||
const { keycloak } = useEnvironment();
|
||||
|
@ -67,6 +70,20 @@ const ServerInfoDropdownItem = () => {
|
|||
);
|
||||
};
|
||||
|
||||
const ClearCachesDropdownItem = () => {
|
||||
const { t } = useTranslation();
|
||||
const [open, toggleModal] = useToggle();
|
||||
|
||||
return (
|
||||
<>
|
||||
<DropdownItem key="clear caches" onClick={() => toggleModal()}>
|
||||
{t("clearCachesTitle")}
|
||||
</DropdownItem>
|
||||
{open && <PageHeaderClearCachesModal onClose={() => toggleModal()} />}
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
const HelpDropdownItem = () => {
|
||||
const { t } = useTranslation();
|
||||
const { enabled, toggleHelp } = useHelp();
|
||||
|
@ -81,23 +98,34 @@ const HelpDropdownItem = () => {
|
|||
);
|
||||
};
|
||||
|
||||
const kebabDropdownItems = [
|
||||
const kebabDropdownItems = (isMasterRealm: boolean, isManager: boolean) => [
|
||||
<ManageAccountDropdownItem key="kebab Manage Account" />,
|
||||
<ServerInfoDropdownItem key="kebab Server Info" />,
|
||||
...(isMasterRealm && isManager
|
||||
? [<ClearCachesDropdownItem key="Clear Caches" />]
|
||||
: []),
|
||||
<HelpDropdownItem key="kebab Help" />,
|
||||
<Divider component="li" key="kebab sign out separator" />,
|
||||
<SignOutDropdownItem key="kebab Sign out" />,
|
||||
];
|
||||
|
||||
const userDropdownItems = [
|
||||
const userDropdownItems = (isMasterRealm: boolean, isManager: boolean) => [
|
||||
<ManageAccountDropdownItem key="Manage Account" />,
|
||||
<ServerInfoDropdownItem key="Server info" />,
|
||||
...(isMasterRealm && isManager
|
||||
? [<ClearCachesDropdownItem key="Clear Caches" />]
|
||||
: []),
|
||||
<Divider component="li" key="sign out separator" />,
|
||||
<SignOutDropdownItem key="Sign out" />,
|
||||
];
|
||||
|
||||
const KebabDropdown = () => {
|
||||
const [isDropdownOpen, setDropdownOpen] = useState(false);
|
||||
const { realm } = useRealm();
|
||||
const { hasAccess } = useAccess();
|
||||
|
||||
const isMasterRealm = realm === "master";
|
||||
const isManager = hasAccess("manage-realm");
|
||||
|
||||
return (
|
||||
<Dropdown
|
||||
|
@ -116,7 +144,9 @@ const KebabDropdown = () => {
|
|||
)}
|
||||
isOpen={isDropdownOpen}
|
||||
>
|
||||
<DropdownList>{kebabDropdownItems}</DropdownList>
|
||||
<DropdownList>
|
||||
{kebabDropdownItems(isMasterRealm, isManager)}
|
||||
</DropdownList>
|
||||
</Dropdown>
|
||||
);
|
||||
};
|
||||
|
@ -124,6 +154,11 @@ const KebabDropdown = () => {
|
|||
const UserDropdown = () => {
|
||||
const { whoAmI } = useWhoAmI();
|
||||
const [isDropdownOpen, setDropdownOpen] = useState(false);
|
||||
const { realm } = useRealm();
|
||||
const { hasAccess } = useAccess();
|
||||
|
||||
const isMasterRealm = realm === "master";
|
||||
const isManager = hasAccess("manage-realm");
|
||||
|
||||
return (
|
||||
<Dropdown
|
||||
|
@ -140,7 +175,7 @@ const UserDropdown = () => {
|
|||
</MenuToggle>
|
||||
)}
|
||||
>
|
||||
<DropdownList>{userDropdownItems}</DropdownList>
|
||||
<DropdownList>{userDropdownItems(isMasterRealm, isManager)}</DropdownList>
|
||||
</Dropdown>
|
||||
);
|
||||
};
|
||||
|
|
101
js/apps/admin-ui/src/PageHeaderClearCachesModal.tsx
Normal file
|
@ -0,0 +1,101 @@
|
|||
import {
|
||||
AlertVariant,
|
||||
Button,
|
||||
Flex,
|
||||
FlexItem,
|
||||
List,
|
||||
ListItem,
|
||||
Modal,
|
||||
ModalVariant,
|
||||
} from "@patternfly/react-core";
|
||||
import { useRealm } from "./context/realm-context/RealmContext";
|
||||
import { useAdminClient } from "./admin-client";
|
||||
import { useTranslation } from "react-i18next";
|
||||
import { HelpItem, useAlerts } from "@keycloak/keycloak-ui-shared";
|
||||
|
||||
export type ClearCachesModalProps = {
|
||||
onClose: () => void;
|
||||
};
|
||||
export const PageHeaderClearCachesModal = ({
|
||||
onClose,
|
||||
}: ClearCachesModalProps) => {
|
||||
const { realm: realmName } = useRealm();
|
||||
const { t } = useTranslation();
|
||||
const { adminClient } = useAdminClient();
|
||||
const { addError, addAlert } = useAlerts();
|
||||
|
||||
const clearCache =
|
||||
(clearCacheFn: typeof adminClient.cache.clearRealmCache) =>
|
||||
async (realm: string) => {
|
||||
try {
|
||||
await clearCacheFn({ realm });
|
||||
addAlert(t("clearCacheSuccess"), AlertVariant.success);
|
||||
} catch (error) {
|
||||
addError("clearCacheError", error);
|
||||
}
|
||||
};
|
||||
const clearRealmCache = clearCache(adminClient.cache.clearRealmCache);
|
||||
const clearUserCache = clearCache(adminClient.cache.clearUserCache);
|
||||
const clearKeysCache = clearCache(adminClient.cache.clearKeysCache);
|
||||
|
||||
return (
|
||||
<Modal
|
||||
title={t("clearCachesTitle")}
|
||||
variant={ModalVariant.small}
|
||||
isOpen
|
||||
onClose={onClose}
|
||||
onClick={(e) => e.stopPropagation()}
|
||||
>
|
||||
<List isPlain isBordered>
|
||||
<ListItem>
|
||||
<Flex justifyContent={{ default: "justifyContentSpaceBetween" }}>
|
||||
<FlexItem>
|
||||
{t("realmCache")}{" "}
|
||||
<HelpItem
|
||||
helpText={t("clearRealmCacheHelp")}
|
||||
fieldLabelId="clearRealmCacheHelp"
|
||||
/>
|
||||
</FlexItem>
|
||||
<FlexItem>
|
||||
<Button onClick={() => clearRealmCache(realmName)}>
|
||||
{t("clearButtonTitle")}
|
||||
</Button>
|
||||
</FlexItem>
|
||||
</Flex>
|
||||
</ListItem>
|
||||
<ListItem>
|
||||
<Flex justifyContent={{ default: "justifyContentSpaceBetween" }}>
|
||||
<FlexItem>
|
||||
{t("userCache")}{" "}
|
||||
<HelpItem
|
||||
helpText={t("clearUserCacheHelp")}
|
||||
fieldLabelId="clearUserCacheHelp"
|
||||
/>
|
||||
</FlexItem>
|
||||
<FlexItem>
|
||||
<Button onClick={() => clearUserCache(realmName)}>
|
||||
{t("clearButtonTitle")}
|
||||
</Button>
|
||||
</FlexItem>
|
||||
</Flex>
|
||||
</ListItem>
|
||||
<ListItem>
|
||||
<Flex justifyContent={{ default: "justifyContentSpaceBetween" }}>
|
||||
<FlexItem>
|
||||
{t("keysCache")}{" "}
|
||||
<HelpItem
|
||||
helpText={t("clearKeysCacheHelp")}
|
||||
fieldLabelId="clearKeysCacheHelp"
|
||||
/>
|
||||
</FlexItem>
|
||||
<FlexItem>
|
||||
<Button onClick={() => clearKeysCache(realmName)}>
|
||||
{t("clearButtonTitle")}
|
||||
</Button>
|
||||
</FlexItem>
|
||||
</Flex>
|
||||
</ListItem>
|
||||
</List>
|
||||
</Modal>
|
||||
);
|
||||
};
|
|
@ -66,6 +66,7 @@ const USER_VERIFY = [
|
|||
type WeauthnSelectProps = {
|
||||
name: string;
|
||||
label: string;
|
||||
labelIcon?: string;
|
||||
options: readonly string[];
|
||||
labelPrefix?: string;
|
||||
isMultiSelect?: boolean;
|
||||
|
@ -74,6 +75,7 @@ type WeauthnSelectProps = {
|
|||
const WebauthnSelect = ({
|
||||
name,
|
||||
label,
|
||||
labelIcon,
|
||||
options,
|
||||
labelPrefix,
|
||||
isMultiSelect = false,
|
||||
|
@ -82,7 +84,8 @@ const WebauthnSelect = ({
|
|||
return (
|
||||
<SelectControl
|
||||
name={name}
|
||||
label={t(label)}
|
||||
label={label}
|
||||
labelIcon={labelIcon}
|
||||
variant={isMultiSelect ? "typeaheadMulti" : "single"}
|
||||
controller={{ defaultValue: options[0] }}
|
||||
options={options.map((option) => ({
|
||||
|
@ -165,7 +168,8 @@ export const WebauthnPolicy = ({
|
|||
/>
|
||||
<WebauthnSelect
|
||||
name={`${namePrefix}SignatureAlgorithms`}
|
||||
label="webAuthnPolicySignatureAlgorithms"
|
||||
label={t("webAuthnPolicySignatureAlgorithms")}
|
||||
labelIcon={t("webAuthnPolicySignatureAlgorithmsHelp")}
|
||||
options={SIGNATURE_ALGORITHMS}
|
||||
isMultiSelect
|
||||
/>
|
||||
|
@ -176,32 +180,36 @@ export const WebauthnPolicy = ({
|
|||
/>
|
||||
<WebauthnSelect
|
||||
name={`${namePrefix}AttestationConveyancePreference`}
|
||||
label="webAuthnPolicyAttestationConveyancePreference"
|
||||
label={t("webAuthnPolicyAttestationConveyancePreference")}
|
||||
labelIcon={t("webAuthnPolicyAttestationConveyancePreferenceHelp")}
|
||||
options={ATTESTATION_PREFERENCE}
|
||||
labelPrefix="attestationPreference"
|
||||
/>
|
||||
<WebauthnSelect
|
||||
name={`${namePrefix}AuthenticatorAttachment`}
|
||||
label="webAuthnPolicyAuthenticatorAttachment"
|
||||
label={t("webAuthnPolicyAuthenticatorAttachment")}
|
||||
labelIcon={t("webAuthnPolicyAuthenticatorAttachmentHelp")}
|
||||
options={AUTHENTICATOR_ATTACHMENT}
|
||||
labelPrefix="authenticatorAttachment"
|
||||
/>
|
||||
<WebauthnSelect
|
||||
name={`${namePrefix}RequireResidentKey`}
|
||||
label="webAuthnPolicyRequireResidentKey"
|
||||
label={t("webAuthnPolicyRequireResidentKey")}
|
||||
labelIcon={t("webAuthnPolicyRequireResidentKeyHelp")}
|
||||
options={RESIDENT_KEY_OPTIONS}
|
||||
labelPrefix="residentKey"
|
||||
/>
|
||||
<WebauthnSelect
|
||||
name={`${namePrefix}UserVerificationRequirement`}
|
||||
label="webAuthnPolicyUserVerificationRequirement"
|
||||
label={t("webAuthnPolicyUserVerificationRequirement")}
|
||||
labelIcon={t("webAuthnPolicyUserVerificationRequirementHelp")}
|
||||
options={USER_VERIFY}
|
||||
labelPrefix="userVerify"
|
||||
/>
|
||||
<TimeSelectorControl
|
||||
name={`${namePrefix}CreateTimeout`}
|
||||
label={t("webAuthnPolicyCreateTimeout")}
|
||||
labelIcon={t("otpPolicyPeriodHelp")}
|
||||
labelIcon={t("webAuthnPolicyCreateTimeoutHelp")}
|
||||
units={["second", "minute", "hour"]}
|
||||
controller={{
|
||||
defaultValue: 0,
|
||||
|
|