Remove robots.txt entirely
* remove robots.txt entirely, as blocking page crawling prevents the `X-Robots-Tag` headers (and similar meta tags) from working as intended. Closes #17433 Signed-off-by: Andy <andy@slice.is> Signed-off-by: Alexander Schwartz <aschwart@redhat.com> Co-authored-by: Alexander Schwartz <aschwart@redhat.com>
This commit is contained in:
parent
5246fffb03
commit
f994cc54d5
4 changed files with 4 additions and 46 deletions
|
@ -45,3 +45,7 @@ The {project_name} Operator will continue to configure `kubernetes` as a transpo
|
|||
|
||||
When developing extensions for {project_name}, developers can now specify dependencies between provider factories classes by implementing the method `dependsOn()` in the `ProviderFactory` interface.
|
||||
See the Javadoc for a detailed description.
|
||||
|
||||
= Removal of robots.txt file
|
||||
|
||||
The `robots.txt` file, previously included by default, is now removed. The default `robots.txt` file blocked all crawling, which prevented the `noindex`/`nofollow` directives from being followed. The desired default behaviour is for {project_name} pages to not show up in search engine results and this is accomplished by the existing `X-Robots-Tag` header, which is set to `none` by default. The value of this header can be overridden per-realm if a different behaviour is needed.
|
||||
|
|
|
@ -121,11 +121,6 @@ The following table shows the recommended paths to expose.
|
|||
|Yes
|
||||
|This path is needed to serve assets correctly. It may be served from a CDN instead of the {project_name} path.
|
||||
|
||||
|/robots.txt
|
||||
|/robots.txt
|
||||
|Yes
|
||||
|Search engine rules
|
||||
|
||||
|/metrics
|
||||
|-
|
||||
|No
|
||||
|
|
|
@ -1,39 +0,0 @@
|
|||
/*
|
||||
* Copyright 2016 Red Hat, Inc. and/or its affiliates
|
||||
* and other contributors as indicated by the @author tags.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.keycloak.services.resources;
|
||||
|
||||
import org.keycloak.utils.MediaType;
|
||||
|
||||
import jakarta.ws.rs.GET;
|
||||
import jakarta.ws.rs.Path;
|
||||
import jakarta.ws.rs.Produces;
|
||||
import jakarta.ws.rs.ext.Provider;
|
||||
|
||||
@Provider
|
||||
@Path("/robots.txt")
|
||||
public class RobotsResource {
|
||||
|
||||
private static final String robots = "User-agent: *\n" + "Disallow: /";
|
||||
|
||||
@GET
|
||||
@Produces(MediaType.TEXT_PLAIN_UTF_8)
|
||||
public String getRobots() {
|
||||
return robots;
|
||||
}
|
||||
|
||||
}
|
|
@ -28,7 +28,6 @@ import org.keycloak.services.filters.KeycloakSecurityHeadersFilter;
|
|||
import org.keycloak.services.resources.KeycloakApplication;
|
||||
import org.keycloak.services.resources.LoadBalancerResource;
|
||||
import org.keycloak.services.resources.RealmsResource;
|
||||
import org.keycloak.services.resources.RobotsResource;
|
||||
import org.keycloak.services.resources.ThemeResource;
|
||||
import org.keycloak.services.resources.WelcomeResource;
|
||||
import org.keycloak.services.resources.admin.AdminRoot;
|
||||
|
@ -43,7 +42,6 @@ public class ResteasyKeycloakApplication extends KeycloakApplication {
|
|||
protected Set<Class<?>> classes = new HashSet<>();
|
||||
|
||||
public ResteasyKeycloakApplication() {
|
||||
classes.add(RobotsResource.class);
|
||||
classes.add(RealmsResource.class);
|
||||
if (Profile.isFeatureEnabled(Profile.Feature.ADMIN_API)) {
|
||||
classes.add(AdminRoot.class);
|
||||
|
|
Loading…
Reference in a new issue