Adds failing remote operator logs to the main test log (#22990)
Closes #22985
This commit is contained in:
parent
b9dff1a555
commit
1545680c48
1 changed file with 42 additions and 22 deletions
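For context on the diff below: when a test fails while the operator is deployed remotely, the change tails the last 5000 characters of the not-ready operator and Keycloak pod logs and writes them to the main test log via the new logFailedOperator() and logFailedKeycloaks() helpers. The following standalone sketch illustrates that log-tailing pattern with the fabric8 client; the class name, namespace, and main() wrapper are illustrative assumptions, not part of the commit.

// Illustrative sketch only -- shows the "tail a failing Deployment's log" pattern,
// assuming a reachable cluster. The namespace below is a placeholder.
import io.fabric8.kubernetes.api.model.apps.Deployment;
import io.fabric8.kubernetes.client.KubernetesClient;
import io.fabric8.kubernetes.client.KubernetesClientBuilder;
import io.fabric8.kubernetes.client.KubernetesClientException;
import io.fabric8.kubernetes.client.dsl.RollableScalableResource;
import io.fabric8.kubernetes.client.readiness.Readiness;
import io.fabric8.kubernetes.client.utils.Serialization;

public class OperatorLogTailSketch {

    // Keep only the last maxChars characters so the main test log stays readable.
    static String tail(String log, int maxChars) {
        return log.substring(Math.max(0, log.length() - maxChars));
    }

    public static void main(String[] args) {
        try (KubernetesClient client = new KubernetesClientBuilder().build()) {
            RollableScalableResource<Deployment> resource = client.apps().deployments()
                    .inNamespace("keycloak-test")      // assumed namespace
                    .withName("keycloak-operator");    // deployment name used by the tests
            Deployment deployment = resource.get();
            if (deployment != null && !Readiness.isDeploymentReady(deployment)) {
                System.out.println("Operator not ready: " + Serialization.asYaml(deployment.getStatus()));
                try {
                    // getLog() can throw, e.g. when no pod ever started
                    System.out.println("Operator log tail: " + tail(resource.getLog(), 5000));
                } catch (KubernetesClientException e) {
                    System.out.println("No operator log: " + e.getMessage());
                }
            }
        }
    }
}

The Math.max(0, ...) guard keeps the substring index valid for logs shorter than the cutoff; the pod-log branch in the diff achieves the same thing with an explicit length check.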
@@ -22,13 +22,14 @@ import io.fabric8.kubernetes.api.model.NamespaceBuilder;
import io.fabric8.kubernetes.api.model.PodSpecFluent.ContainersNested;
import io.fabric8.kubernetes.api.model.PodTemplateSpecFluent.SpecNested;
import io.fabric8.kubernetes.api.model.Secret;
import io.fabric8.kubernetes.api.model.apps.StatefulSet;
import io.fabric8.kubernetes.api.model.apps.Deployment;
import io.fabric8.kubernetes.client.Config;
import io.fabric8.kubernetes.client.ConfigBuilder;
import io.fabric8.kubernetes.client.KubernetesClient;
import io.fabric8.kubernetes.client.KubernetesClientBuilder;
import io.fabric8.kubernetes.client.KubernetesClientException;
import io.fabric8.kubernetes.client.NamespacedKubernetesClient;
import io.fabric8.kubernetes.client.dsl.RollableScalableResource;
import io.fabric8.kubernetes.client.readiness.Readiness;
import io.fabric8.kubernetes.client.utils.Serialization;
import io.javaoperatorsdk.operator.Operator;

@@ -276,32 +277,51 @@ public class BaseOperatorTest implements QuarkusTestAfterEachCallback {
            Log.warnf("Test failed with %s: %s", context.getTestStatus().getTestErrorCause().getMessage(), context.getTestStatus().getTestErrorCause().getClass().getName());
            savePodLogs();
            // provide some helpful entries in the main log as well
            k8sclient.resources(Keycloak.class).list().getItems().stream()
                    .filter(kc -> !Optional.ofNullable(kc.getStatus()).map(KeycloakStatus::isReady).orElse(false))
                    .forEach(kc -> {
                        Log.warnf("Keycloak failed to become ready \"%s\" %s", kc.getMetadata().getName(), Serialization.asYaml(kc.getStatus()));
                        var statefulSet = k8sclient.resources(StatefulSet.class).withName(KeycloakDeployment.getName(kc)).get();
                        if (statefulSet != null) {
                            Log.warnf("Keycloak \"%s\" StatefulSet status %s", kc.getMetadata().getName(), Serialization.asYaml(statefulSet.getStatus()));
                            k8sclient.pods().withLabels(statefulSet.getSpec().getSelector().getMatchLabels()).list()
                                    .getItems().stream().filter(pod -> !Readiness.isPodReady(pod)).forEach(pod -> {
                                        try {
                                            String log = k8sclient.pods().resource(pod).getLog();
                                            if (log.length() > 5000) {
                                                log = log.substring(log.length() - 5000);
                                            }
                                            Log.warnf("Not ready pod log \"%s\": %s", pod.getMetadata().getName(), log);
                                        } catch (KubernetesClientException e) {
                                            Log.warnf("No pod log for \"%s\": %s", pod.getMetadata().getName(), e.getMessage());
                                        }
                                    });
                        }
                    });
            logFailedKeycloaks();
            logFailedOperator();
        } finally {
            cleanup();
        }
    }

    private void logFailedOperator() {
        if (operatorDeployment != OperatorDeployment.remote) {
            return;
        }
        RollableScalableResource<Deployment> deploymentResource = k8sclient.apps().deployments().withName("keycloak-operator");
        Deployment deployment = deploymentResource.get();
        if (!Readiness.isDeploymentReady(deployment)) {
            Log.warnf("Operator failed to become ready %s", Serialization.asYaml(deployment.getStatus()));
            try {
                String log = deploymentResource.getLog();
                Log.warnf("Not ready operator log: %s", log.substring(Math.max(0, log.length() - 5000)));
            } catch (KubernetesClientException e) {
                Log.warnf("No operator log: %s", e.getMessage());
            }
        }
    }

    private void logFailedKeycloaks() {
        k8sclient.resources(Keycloak.class).list().getItems().stream()
                .filter(kc -> !Optional.ofNullable(kc.getStatus()).map(KeycloakStatus::isReady).orElse(false))
                .forEach(kc -> {
                    Log.warnf("Keycloak failed to become ready \"%s\" %s", kc.getMetadata().getName(), Serialization.asYaml(kc.getStatus()));
                    var statefulSet = k8sclient.apps().statefulSets().withName(KeycloakDeployment.getName(kc)).get();
                    if (statefulSet != null) {
                        Log.warnf("Keycloak \"%s\" StatefulSet status %s", kc.getMetadata().getName(), Serialization.asYaml(statefulSet.getStatus()));
                        k8sclient.pods().withLabels(statefulSet.getSpec().getSelector().getMatchLabels()).list()
                                .getItems().stream().filter(pod -> !Readiness.isPodReady(pod)).forEach(pod -> {
                                    try {
                                        String log = k8sclient.pods().resource(pod).getLog();
                                        Log.warnf("Not ready pod log \"%s\": %s", pod.getMetadata().getName(), log.substring(Math.max(0, log.length() - 5000)));
                                    } catch (KubernetesClientException e) {
                                        Log.warnf("No pod log for \"%s\": %s", pod.getMetadata().getName(), e.getMessage());
                                    }
                                });
                    }
                });
    }

    @AfterAll
    public static void after() throws FileNotFoundException {