diff --git a/testsuite/performance/README.log-tool.md b/testsuite/performance/README.log-tool.md
index 1692c34b07..9414460a84 100644
--- a/testsuite/performance/README.log-tool.md
+++ b/testsuite/performance/README.log-tool.md
@@ -7,7 +7,7 @@ Perform the usual test run:
mvn verify -Pteardown
mvn verify -Pprovision
mvn verify -Pgenerate-data -Ddataset=100users -Dimport.workers=10 -DhashIterations=100
-mvn verify -Ptest -Ddataset=100users -DrunUsers=200 -DrampUpPeriod=10 -DuserThinkTime=0 -DbadLoginAttempts=1 -DrefreshTokenCount=1 -DmeasurementPeriod=60
+mvn verify -Ptest -Ddataset=100users -DusersPerSec=5 -DrampUpPeriod=10 -DuserThinkTime=0 -DbadLoginAttempts=1 -DrefreshTokenCount=1 -DmeasurementPeriod=60
```
Now analyze the generated simulation.log (adjust LOG_DIR, FROM, and TO):
diff --git a/testsuite/performance/README.md b/testsuite/performance/README.md
index fdd93d7987..24b12ba1a8 100644
--- a/testsuite/performance/README.md
+++ b/testsuite/performance/README.md
@@ -27,7 +27,7 @@ mvn clean install
# Make sure your Docker daemon is running THEN
mvn verify -Pprovision
mvn verify -Pgenerate-data -Ddataset=100u -DnumOfWorkers=10 -DhashIterations=100
-mvn verify -Ptest -Ddataset=100u -DrunUsers=200 -DrampUpPeriod=10 -DuserThinkTime=0 -DbadLoginAttempts=1 -DrefreshTokenCount=1 -DmeasurementPeriod=60
+mvn verify -Ptest -Ddataset=100u -DusersPerSec=4.5 -DrampUpPeriod=10 -DuserThinkTime=0 -DbadLoginAttempts=1 -DrefreshTokenCount=1 -DmeasurementPeriod=60 -DfilterResults=true
```
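The switch from `runUsers` (a fixed pool of concurrent users) to `usersPerSec` (an open arrival rate) changes what the number means. A back-of-the-envelope sketch, not part of the testsuite, of roughly how many user sessions the example above injects, based on the injection profile the simulations use further down in this patch (ramp from ~0 to `usersPerSec` over `rampUpPeriod`, then a constant rate for `warmUpPeriod + measurementPeriod`):

```java
// Rough estimate only; the values below are the example's own parameters.
public class InjectionEstimate {
    public static void main(String[] args) {
        double usersPerSec = 4.5;
        int rampUpPeriod = 10;      // seconds
        int warmUpPeriod = 0;       // seconds (default)
        int measurementPeriod = 60; // seconds

        double sessions = usersPerSec * rampUpPeriod / 2.0          // ramp-up: average rate is ~half the target
                + usersPerSec * (warmUpPeriod + measurementPeriod); // steady phase
        System.out.printf("~%.0f user sessions over %d seconds%n",
                sessions, rampUpPeriod + warmUpPeriod + measurementPeriod);
        // roughly 293 user sessions over 70 seconds for the values above
    }
}
```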
@@ -40,7 +40,7 @@ mvn verify -Pteardown
You can perform all phases in a single run:
```
-mvn verify -Pprovision,generate-data,test,teardown -Ddataset=100u -DnumOfWorkers=10 -DhashIterations=100 -DrunUsers=200 -DrampUpPeriod=10
+mvn verify -Pprovision,generate-data,test,teardown -Ddataset=100u -DnumOfWorkers=10 -DhashIterations=100 -DusersPerSec=5 -DrampUpPeriod=10
```
 Note: The order in which Maven profiles are listed does not determine the order in which the profile-related plugins are executed. The `teardown` profile always executes last.
@@ -141,13 +141,11 @@ Usage: `mvn verify -Ptest[,cluster] [-DtestParameter=value]`.
| --- | --- | --- |
| `gatling.simulationClass` | Classname of the simulation to be run. | `keycloak.BasicOIDCSimulation` |
| `dataset` | Name of the dataset to use. (Individual dataset properties can be overridden with `-Ddataset.property=value`.) | `default` |
-| `runUsers` | Number of users for the simulation run. | `1` |
+| `usersPerSec` | Arrival rate of new users per second. Can be a floating point number. | `1.0` |
| `rampUpPeriod` | Period during which the users will be ramped up. (seconds) | `0` |
| `warmUpPeriod` | Period with steady number of users intended for the system under test to warm up. (seconds) | `0` |
| `measurementPeriod` | A measurement period after the system is warmed up. (seconds) | `30` |
-| `rampDownASAP` | When `true` the test will be checking for ramp-down condition after each *scenario step*. When `false` the check will be done only at the end of a *scenario iteration*. | `false` |
| `filterResults` | Whether to filter out requests which are outside of the `measurementPeriod`. | `false` |
-| `pace` | A dynamic pause after each *scenario iteration*. For example if the pace is 30s and one scenario iteration takes only 20s, the simulation will wait additional 10s before continuing to the next iteration. | `0` |
| `userThinkTime` | Pause between individual scenario steps. | `5` |
| `refreshTokenPeriod`| Period after which token should be refreshed. | `10` |
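A minimal sketch of how these `-DtestParameter=value` flags reach the tests: they end up as JVM system properties that `TestConfig` reads. The `usersPerSec` and period lines below mirror the changed lines in `TestConfig.java` later in this patch; the class name is made up for illustration.

```java
public class TestParamsSketch {
    public static void main(String[] args) {
        // usersPerSec is parsed as a double so fractional arrival rates such as -DusersPerSec=4.5 work
        double usersPerSec = Double.valueOf(System.getProperty("usersPerSec", "1"));
        int rampUpPeriod = Integer.getInteger("rampUpPeriod", 0);
        int warmUpPeriod = Integer.getInteger("warmUpPeriod", 0);
        int measurementPeriod = Integer.getInteger("measurementPeriod", 30);

        System.out.printf("usersPerSec=%s rampUpPeriod=%s warmUpPeriod=%s measurementPeriod=%s%n",
                usersPerSec, rampUpPeriod, warmUpPeriod, measurementPeriod);
    }
}
// e.g.: java -DusersPerSec=4.5 -DrampUpPeriod=10 TestParamsSketch
```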
@@ -161,7 +159,7 @@ Usage: `mvn verify -Ptest[,cluster] [-DtestParameter=value]`.
Example:
-`mvn verify -Ptest -Dgatling.simulationClass=keycloak.AdminConsoleSimulation -Ddataset=100u -DrunUsers=1 -DmeasurementPeriod=60 -DuserThinkTime=0 -DrefreshTokenPeriod=15`
+`mvn verify -Ptest -Dgatling.simulationClass=keycloak.AdminConsoleSimulation -Ddataset=100u -DusersPerSec=1 -DmeasurementPeriod=60 -DuserThinkTime=0 -DrefreshTokenPeriod=15`
## Monitoring
@@ -210,25 +208,25 @@ Results will be stored in folder: `tests/target/sar`.
- Provision single node of KC + DB, generate data, run test, and tear down the provisioned system:
- `mvn verify -Pprovision,generate-data,test,teardown -Ddataset=100u -DrunUsers=100`
+ `mvn verify -Pprovision,generate-data,test,teardown -Ddataset=100u -DusersPerSec=5`
- Provision single node of KC + DB, generate data, no test, no teardown:
`mvn verify -Pprovision,generate-data -Ddataset=100u`
-- Run test against provisioned system using 100 concurrent users ramped up over 10 seconds, then tear it down:
+- Run a test against the provisioned system, injecting 5 new users per second ramped up over 10 seconds, then tear it down:
- `mvn verify -Ptest,teardown -Ddataset=100u -DrunUsers=100 -DrampUpPeriod=10`
+ `mvn verify -Ptest,teardown -Ddataset=100u -DusersPerSec=5 -DrampUpPeriod=10`
### Cluster
- Provision a 1-node KC cluster + DB, generate data, run test against the provisioned system, then tear it down:
- `mvn verify -Pprovision,cluster,generate-data,test,teardown -Ddataset=100u -DrunUsers=100`
+ `mvn verify -Pprovision,cluster,generate-data,test,teardown -Ddataset=100u -DusersPerSec=5`
- Provision a 2-node KC cluster + DB, generate data, run test against the provisioned system, then tear it down:
- `mvn verify -Pprovision,cluster,generate-data,test,teardown -Dkeycloak.scale=2 -DusersPerRealm=200 -DrunUsers=200`
+ `mvn verify -Pprovision,cluster,generate-data,test,teardown -Dkeycloak.scale=2 -DusersPerRealm=200 -DusersPerSec=5`
## Developing tests in IntelliJ IDEA
diff --git a/testsuite/performance/tests/src/main/java/org/keycloak/performance/TestConfig.java b/testsuite/performance/tests/src/main/java/org/keycloak/performance/TestConfig.java
index 6d7d5d7845..288b06768a 100644
--- a/testsuite/performance/tests/src/main/java/org/keycloak/performance/TestConfig.java
+++ b/testsuite/performance/tests/src/main/java/org/keycloak/performance/TestConfig.java
@@ -1,13 +1,12 @@
package org.keycloak.performance;
+import java.text.SimpleDateFormat;
import org.keycloak.performance.util.FilteredIterator;
import org.keycloak.performance.util.LoopingIterator;
-import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
-import java.util.Date;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ThreadLocalRandom;
@@ -55,7 +54,7 @@ public class TestConfig {
//
// Settings used by tests to control common test parameters
//
- public static final int runUsers = Integer.getInteger("runUsers", 1);
+ public static final double usersPerSec = Double.valueOf(System.getProperty("usersPerSec", "1"));
public static final int rampUpPeriod = Integer.getInteger("rampUpPeriod", 0);
public static final int warmUpPeriod = Integer.getInteger("warmUpPeriod", 0);
public static final int measurementPeriod = Integer.getInteger("measurementPeriod", 30);
@@ -118,17 +117,28 @@ public class TestConfig {
}
public static String toStringCommonTestParameters() {
- return String.format(" runUsers: %s\n"
- + " rampUpPeriod: %s\n"
- + " warmUpPeriod: %s\n"
- + " measurementPeriod: %s\n"
- + " rampDownASAP: %s\n"
- + " filterResults: %s\n"
- + " pace: %s\n"
- + " userThinkTime: %s\n"
- + " refreshTokenPeriod: %s",
- runUsers, rampUpPeriod, warmUpPeriod, measurementPeriod, rampDownASAP, filterResults, pace, userThinkTime, refreshTokenPeriod
- );
+ return String.format(
+ " usersPerSec: %s\n" +
+ " rampUpPeriod: %s\n"+
+ " warmUpPeriod: %s\n"+
+ " measurementPeriod: %s\n"+
+ " filterResults: %s\n"+
+ " userThinkTime: %s\n"+
+ " refreshTokenPeriod: %s",
+ usersPerSec, rampUpPeriod, warmUpPeriod, measurementPeriod, filterResults, userThinkTime, refreshTokenPeriod);
+ }
+
+ public static SimpleDateFormat SIMPLE_TIME = new SimpleDateFormat("HH:mm:ss");
+
+ public static String toStringTimestamps() {
+ return String.format(" simulationStartTime: %s\n"
+ + " warmUpStartTime: %s\n"
+ + " measurementStartTime: %s\n"
+ + " measurementEndTime: %s",
+ SIMPLE_TIME.format(simulationStartTime),
+ SIMPLE_TIME.format(warmUpStartTime),
+ SIMPLE_TIME.format(measurementStartTime),
+ SIMPLE_TIME.format(measurementEndTime));
}
public static String toStringDatasetProperties() {
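The timestamps printed by `toStringTimestamps()` (`simulationStartTime`, `warmUpStartTime`, `measurementStartTime`, `measurementEndTime`) are defined elsewhere in `TestConfig` and are not shown in this hunk. A hypothetical sketch of how such values could be derived from the period parameters, purely for orientation:

```java
// Assumption for illustration only, not code from this patch. Values are epoch milliseconds,
// consistent with how measurementEndTime is compared to System.currentTimeMillis() elsewhere
// in the testsuite (rampDownNotStarted).
public class TimestampsSketch {
    static final int rampUpPeriod = 10, warmUpPeriod = 0, measurementPeriod = 60; // seconds

    public static final long simulationStartTime = System.currentTimeMillis();
    public static final long warmUpStartTime = simulationStartTime + rampUpPeriod * 1000L;
    public static final long measurementStartTime = warmUpStartTime + warmUpPeriod * 1000L;
    public static final long measurementEndTime = measurementStartTime + measurementPeriod * 1000L;
}
```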
diff --git a/testsuite/performance/tests/src/main/java/org/keycloak/performance/log/LogLine.java b/testsuite/performance/tests/src/main/java/org/keycloak/performance/log/LogLine.java
new file mode 100644
index 0000000000..4fa33de04e
--- /dev/null
+++ b/testsuite/performance/tests/src/main/java/org/keycloak/performance/log/LogLine.java
@@ -0,0 +1,178 @@
+package org.keycloak.performance.log;
+
+/**
+ * @author Marko Strukelj
+ */
+class LogLine {
+
+ private final String rawLine;
+ private Type type;
+ private String simulationClass;
+ private String simulationId;
+ private String description;
+ private String scenario;
+ private String userId;
+ private String request;
+ private long start = -1;
+ private long end = -1;
+ private long time2 = -1;
+ private long time3 = -1;
+ private boolean ok;
+ private String status;
+
+ LogLine(String line) {
+ rawLine = line;
+ }
+
+ String rawLine() {
+ return rawLine;
+ }
+
+ Type type() {
+ return type != null ? type : parse().type;
+ }
+
+ String simulationClass() {
+ return simulationClass;
+ }
+
+ public String simulationId() {
+ return simulationId;
+ }
+
+ public String description() {
+ return description;
+ }
+
+ long startTime() {
+ return type != null ? start : parse().start;
+ }
+
+ long endTime() {
+ return type != null ? end : parse().end;
+ }
+
+ String scenario() {
+ return type != null ? scenario : parse().scenario;
+ }
+
+ String userId() {
+ return type != null ? userId : parse().userId;
+ }
+
+ String request() {
+ return type != null ? request : parse().request;
+ }
+
+ public void setStart(long start) {
+ this.start = start;
+ }
+
+ public void setEnd(long end) {
+ this.end = end;
+ }
+
+ long logTime() {
+ if (type == null) {
+ parse();
+ }
+ return type == Type.RUN || type == Type.USER_START ? start : end;
+ }
+
+ boolean ok() {
+ if (type == null) {
+ parse();
+ }
+ return type != null ? ok : parse().ok;
+ }
+
+ LogLine parse() {
+ String[] cols = rawLine.split("\\t");
+
+ if ("RUN".equals(cols[2])) {
+ type = Type.RUN;
+ simulationClass = cols[0];
+ simulationId = cols[1];
+ start = Long.parseLong(cols[3]);
+ description = cols[4];
+ } else if ("REQUEST".equals(cols[2])) {
+ type = Type.REQUEST;
+ scenario = cols[0];
+ userId = cols[1];
+ request = cols[4];
+ start = Long.parseLong(cols[5]);
+ time2 = Long.parseLong(cols[6]);
+ time3 = Long.parseLong(cols[7]);
+ end = Long.parseLong(cols[8]);
+ ok = "OK".equals(cols[9]);
+ if (cols.length > 10) {
+ StringBuilder sb = new StringBuilder();
+ for (int i = 10; i < cols.length; i++) {
+ if (sb.length() > 0) {
+ sb.append("\t");
+ }
+ sb.append(cols[i]);
+ }
+ status = sb.toString();
+ }
+ } else if ("USER".equals(cols[2])) {
+ if ("START".equals(cols[3])) {
+ type = Type.USER_START;
+ } else if ("END".equals(cols[3])) {
+ type = Type.USER_END;
+ } else {
+ throw new RuntimeException("Unknown log entry type: USER " + cols[3]);
+ }
+ scenario = cols[0];
+ userId = cols[1];
+ start = Long.parseLong(cols[4]);
+ end = Long.parseLong(cols[5]);
+ } else {
+            throw new RuntimeException("Unknown log entry type: " + cols[2]);
+ }
+
+ return this;
+ }
+
+ /**
+ * @return Reconstructed LogLine
+ */
+ public String compose() {
+ switch (type()) {
+ case RUN: {
+                return simulationClass + "\t" + simulationId + "\t" + type.caption() + "\t" + start + "\t" + description + "\t2.0\t";
+ }
+ case REQUEST: {
+ return scenario + "\t" + userId + "\t" + type.caption() + "\t" + request + "\t" + start + "\t" +
+ time2 + "\t" + time3 + "\t" + end + "\t" + (ok ? "OK" : "KO") + "\t" + (status != null ? status : "");
+ }
+ case USER_START: {
+ return scenario + "\t" + userId + "\t" + type.caption() + "\t" + start + "\t0";
+
+ }
+ case USER_END: {
+ return scenario + "\t" + userId + "\t" + type.caption() + "\t" + start + "\t" + end;
+
+ }
+ }
+ throw new IllegalStateException("Unhandled type: " + type());
+ }
+
+
+ enum Type {
+ RUN("RUN"),
+ REQUEST("REQUEST\t"),
+ USER_START("USER\tSTART"),
+ USER_END("USER\tEND");
+
+ private String caption;
+
+ Type(String caption) {
+ this.caption = caption;
+ }
+
+ public String caption() {
+ return caption;
+ }
+ }
+}
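A small usage sketch of the extracted `LogLine` class (it is package-private, so the sketch lives in the same package). The scenario name, user id, and request name below are made-up sample values in the tab-separated Gatling 2.x format that `parse()` expects:

```java
package org.keycloak.performance.log;

public class LogLineSketch {
    public static void main(String[] args) {
        // column 3 is the empty group field for REQUEST entries
        String raw = "users\t1\tREQUEST\t\tLogin\t1000\t1000\t1200\t1200\tOK";
        LogLine line = new LogLine(raw);

        System.out.println(line.type());       // REQUEST
        System.out.println(line.request());    // Login
        System.out.println(line.startTime());  // 1000
        System.out.println(line.endTime());    // 1200
        System.out.println(line.ok());         // true

        // Adjust the start time and re-serialize the entry, as the log filter does.
        line.setStart(1100);
        System.out.println(line.compose());
    }
}
```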
diff --git a/testsuite/performance/tests/src/main/java/org/keycloak/performance/log/LogProcessor.java b/testsuite/performance/tests/src/main/java/org/keycloak/performance/log/LogProcessor.java
index da835f25ae..55f6889b99 100644
--- a/testsuite/performance/tests/src/main/java/org/keycloak/performance/log/LogProcessor.java
+++ b/testsuite/performance/tests/src/main/java/org/keycloak/performance/log/LogProcessor.java
@@ -1,41 +1,28 @@
package org.keycloak.performance.log;
-import java.io.BufferedReader;
import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
import java.io.FileOutputStream;
-import java.io.FileReader;
import java.io.IOException;
-import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
-import java.io.Reader;
-import java.io.Writer;
-import java.nio.charset.Charset;
-import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedHashMap;
-import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
+import static org.keycloak.performance.TestConfig.SIMPLE_TIME;
/**
* To run use the following:
*
- * mvn -f testsuite/integration-arquillian/tests/performance/gatling-perf exec:java -Dexec.mainClass=org.keycloak.performance.log.LogProcessor -Dexec.args="ARGUMENTS"
+ * mvn -f testsuite/performance/tests exec:java -Dexec.mainClass=org.keycloak.performance.log.LogProcessor -Dexec.args="ARGUMENTS"
*
* @author Marko Strukelj
*/
public class LogProcessor {
- static boolean INLAYED_INCLUDED = false;
- static boolean OUTLAYED_INCLUDED = false;
- static boolean COMPLETE_SESSIONS = false;
+ boolean inlayedIncluded = false;
+ boolean outlayedIncluded = false;
File simulationLogFile;
String lastRequestLabel;
@@ -152,16 +139,30 @@ public class LogProcessor {
}
public void filterLog(long start, long end) throws IOException {
- filterLog(start, end, INLAYED_INCLUDED, OUTLAYED_INCLUDED, COMPLETE_SESSIONS);
+ filterLog(start, end, inlayedIncluded, outlayedIncluded);
}
-
- public void filterLog(long start, long end, boolean inlayedIncluded, boolean outlayedIncluded, boolean completeSessions) throws IOException {
- INLAYED_INCLUDED = inlayedIncluded;
- OUTLAYED_INCLUDED = outlayedIncluded;
- COMPLETE_SESSIONS = completeSessions;
+
+ /**
+ *
+     * Either setting of inlayedIncluded results in a downward skew of values in the first second of the interval.
+     * Setting outlayedIncluded to true and inlayedIncluded to false seems to best balance the downward skew of the
+     * first second against the upward skew of the last second, but this varies between test runs and is hard to state definitively.
+     *
+     * All requests within the time interval also have their corresponding USER START and USER END entries copied
+     * over, adjusted to the start and end boundaries of the interval.
+     *
+     * @param start Timestamp at which to start copying over logged entries
+     * @param end Timestamp beyond which logged entries are no longer copied over
+     * @param inlayedIncluded Whether to include requests that start before the interval start time but end within the interval
+     * @param outlayedIncluded Whether to include requests that start within the interval but end after the interval end time
+ * @throws IOException
+ */
+ public void filterLog(long start, long end, boolean inlayedIncluded, boolean outlayedIncluded) throws IOException {
+ this.inlayedIncluded = inlayedIncluded;
+ this.outlayedIncluded = outlayedIncluded;
- System.out.println(String.format("Filtering %s/%s \nINLAYED_INCLUDED: %s \nOUTLAYED_INCLUDED: %s \nCOMPLETE_SESSIONS: %s",
- simulationLogFile.getParentFile().getName(), simulationLogFile.getName(), INLAYED_INCLUDED, OUTLAYED_INCLUDED, COMPLETE_SESSIONS));
+ System.out.println(String.format("Filtering %s/%s \ninlayedIncluded: %s \noutlayedIncluded: %s",
+ simulationLogFile.getParentFile().getName(), simulationLogFile.getName(), this.inlayedIncluded, this.outlayedIncluded));
File simulationLogFileFiltered = new File(simulationLogFile.getAbsoluteFile() + ".filtered");
File simulationLogFileUnfiltered = new File(simulationLogFile.getAbsoluteFile() + ".unfiltered");
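For reference, the same filtering can be driven programmatically. A sketch mirroring the `after` hook added in `CommonSimulation.scala` below; the simulation class name is only an example, and `TestConfig`'s measurement timestamps are assumed to be epoch milliseconds as elsewhere in the patch:

```java
package org.keycloak.performance.log;

import org.keycloak.performance.TestConfig;

public class FilterSketch {
    public static void main(String[] args) throws Exception {
        // The Class-based constructor is the one already used from the Scala code in this patch.
        Class<?> simulationClass = Class.forName("keycloak.BasicOIDCSimulation");
        LogProcessor proc = new LogProcessor(simulationClass);

        // false, false => exclude both inlayed requests (start before the window, end inside it)
        // and outlayed requests (start inside the window, end after it).
        proc.filterLog(TestConfig.measurementStartTime, TestConfig.measurementEndTime, false, false);
    }
}
```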
@@ -175,19 +176,16 @@ public class LogProcessor {
public void copyPartialLog(PrintWriter output, long start, long end) throws IOException {
- File tmpFile = null, headFile = null;
- PrintWriter tmp = null, startHead = null;
+ System.out.println(String.format("Extracting from: %s to %s (%s - %s)", SIMPLE_TIME.format(start), SIMPLE_TIME.format(end), start, end));
+
         HashMap<String, LogLine> starts = new HashMap<>();
-        HashSet<String> actives = new HashSet<>();
+        HashMap<String, long[]> sessionTimes = new HashMap<>();
- if (COMPLETE_SESSIONS) {
- tmp = output;
- tmpFile = getTmpFile();
- output = new PrintWriter(new OutputStreamWriter(new FileOutputStream(tmpFile), "utf-8"));
- headFile = getTmpFile();
- startHead = new PrintWriter(new OutputStreamWriter(new FileOutputStream(headFile), "utf-8"));
- }
+        // We adjust log entries so that stats are correctly calculated
+ // - we add USER START entries for user sessions that cross the time period boundaries
+ // - we adjust start time of USER START entries to time interval start time
+ // - we adjust end time of USER END entries that occur beyond the time boundaries to time interval end time
LogReader reader = new LogReader(simulationLogFile);
try {
@@ -195,284 +193,106 @@ public class LogProcessor {
while ((line = reader.readLine()) != null) {
if (line.type() == LogLine.Type.RUN) {
- if (COMPLETE_SESSIONS) {
- startHead.println(line.rawLine());
- } else {
- output.println(line.rawLine());
- }
+ // adjust start time of simulation
+ line.setStart(start);
+ output.println(line.compose());
continue;
}
long startTime = line.startTime();
long endTime = line.endTime();
- if (startTime >= start && startTime < end) {
- if (OUTLAYED_INCLUDED) {
- output.println(line.rawLine());
- } else if (endTime < end) {
- output.println(line.rawLine());
- }
- } else if (INLAYED_INCLUDED && endTime >= start && endTime < end) {
- output.println(line.rawLine());
- }
- if (COMPLETE_SESSIONS) {
- if (line.type() == LogLine.Type.USER_START) {
- if (startTime < end) {
- starts.put(line.userId(), line);
+ if (line.type() == LogLine.Type.USER_END) {
+                    // if it is a USER_END, adjust its start time to that of the first REQUEST of the session within
+                    // the interval, and clamp its end time to the interval end if it ends beyond it
+ long[] times = sessionTimes.remove(line.userId());
+
+                    // We only keep USER END entries whose session starts within the interval and ends at or after the interval start
+ if (endTime >= start && startTime < end && startTime >= start) {
+ if (times == null) {
+ throw new IllegalStateException("There should be session info present for user: " + line.userId());
}
- } else if (line.type() == LogLine.Type.USER_END) {
- if (endTime < start) {
- starts.remove(line.userId());
- } else if (endTime >= start && endTime < end) {
- LogLine sline = starts.remove(line.userId());
- if (sline != null) {
- startHead.println(sline.rawLine());
- }
- actives.remove(line.userId());
- } else if (endTime >= end) {
- boolean was = actives.remove(line.userId());
- if (was) {
- output.println(line.rawLine());
- }
+ line.setStart(times[0]);
+                        // if USER END ends beyond the time interval, clamp it to the interval end time
+ if (line.endTime() >= end) {
+ line.setEnd(end);
+ //line.setEnd(times[1]);
+ ////line.end = times[1] > end ? times[1] : end;
}
- } else if (line.type() == LogLine.Type.REQUEST) {
- if ((startTime >= start && startTime < end) || (endTime >= start && endTime < end)) {
- LogLine sline = starts.remove(line.userId());
- if (sline != null) {
- startHead.println(sline.rawLine());
+
+ output.println(line.compose());
+ }
+
+ // make sure any cached start line is also cleaned
+ starts.remove(line.userId());
+
+ } else if (line.type() == LogLine.Type.USER_START) {
+ if (startTime < start) {
+                        // it is a USER START before the interval start; its start time may need to be clamped to the
+                        // interval start later (once we know the session has requests inside), so store the entry for now
+ starts.put(line.userId(), line);
+ } else if (startTime < end) {
+ // it's within the interval thus no adjustment needed
+ starts.put(line.userId(), line);
+ //output.println(line.rawLine());
+ }
+
+ // we ignore USER START entries beyond the time interval
+
+ } else if (line.type() == LogLine.Type.REQUEST) {
+
+                    // a REQUEST entry needs processing if it starts within the time interval, or if it ends within
+                    // the interval and inlayed requests are included
+ boolean process = (startTime >= start && startTime < end) ||
+ (inlayedIncluded && endTime >= start && endTime < end);
+
+ if (process) {
+ // store start time of first request entry, and end time of last request entry for session within the interval
+ long[] times = sessionTimes.get(line.userId());
+ if (times == null) {
+ times = new long[]{line.startTime(), line.endTime()};
+ sessionTimes.put(line.userId(), times);
+
+ // if this is the first REQUEST within interval there may be a cached start line
+ LogLine startLine = starts.remove(line.userId());
+ if (startLine != null) {
+                            // clamp startLine's start time to the interval start if necessary, recompose it, and write it to output
+ if (startLine.startTime() < start) {
+ startLine.setStart(start);
+ //startLine.setStart(line.startTime());
+ ////startLine.start = line.startTime() < start ? line.startTime() : start;
+ output.println(startLine.compose());
+ } else {
+ output.println(startLine.rawLine());
+ }
+ times[0] = startLine.startTime();
}
- actives.add(line.userId());
+ } else {
+ times[1] = line.endTime();
}
+
+ if (outlayedIncluded) {
+                            // copy the entry over even if it ended outside the time interval
+ output.println(line.rawLine());
+ } else if (endTime < end) {
+                            // copy the entry over only if it also ended within the time interval
+ output.println(line.rawLine());
+ }
+
}
}
}
} finally {
reader.close();
output.flush();
-
- if (COMPLETE_SESSIONS) {
- startHead.close();
- output.close();
-
-
- BufferedReader tmpIn = new BufferedReader(new FileReader(headFile));
- copyStream(tmpIn, tmp);
-
- tmpIn = new BufferedReader(new FileReader(tmpFile));
- copyStream(tmpIn, tmp);
-
- tmp.flush();
-
- Files.delete(headFile.toPath());
- Files.delete(tmpFile.toPath());
- }
}
}
- private static void copyStream(Reader reader, Writer writer) throws IOException {
- char [] buff = new char[16 * 1024];
- int rc;
- while ((rc = reader.read(buff)) != -1) {
- writer.write(buff, 0, rc);
- }
- }
-
- private File getTmpFile() throws IOException {
- return Files.createTempFile("perftest", ".log").toFile();
- }
-
public void setLastRequestLabel(String lastRequestLabel) {
this.lastRequestLabel = lastRequestLabel;
}
- static class Stats {
- private long startTime;
-
- // timestamp at which rampUp is complete
- private long lastUserStart;
-
- // timestamp at which first user completed the simulation
- private long firstUserEnd;
-
- // timestamp at which all users completed the simulation
- private long lastUserEnd;
-
- // timestamps of iteration completions - when all users achieved last step of the scenario - for each scenario in the log file
-        private ConcurrentHashMap<String, List<Long>> completedIterations = new ConcurrentHashMap<>();
-
-        private LinkedHashMap<String, Set<String>> scenarioRequests = new LinkedHashMap<>();
-
-        private HashMap<String, Integer> requestCounters = new HashMap<>();
-
- public void setStartTime(long startTime) {
- this.startTime = startTime;
- }
-
- public void setLastUserStart(long lastUserStart) {
- this.lastUserStart = lastUserStart;
- }
-
- public void setFirstUserEnd(long firstUserEnd) {
- this.firstUserEnd = firstUserEnd;
- }
-
- public long firstUserEnd() {
- return firstUserEnd;
- }
-
- public void setLastUserEnd(long lastUserEnd) {
- this.lastUserEnd = lastUserEnd;
- }
-
- public void addIterationCompletedByAll(String scenario, long time) {
- this.completedIterations.computeIfAbsent(scenario, k -> new ArrayList<>())
- .add(time);
- }
-
- public void addRequest(String scenario, String request) {
-            Set<String> requests = scenarioRequests.get(scenario);
- if (requests == null) {
- requests = new LinkedHashSet<>();
- scenarioRequests.put(scenario, requests);
- }
- requests.add(request);
- incrementRequestCounter(scenario, request);
- }
-
-        public Map<String, Set<String>> requestNames() {
- return scenarioRequests;
- }
-
- private void incrementRequestCounter(String scenario, String requestName) {
- String key = scenario + "." + requestName;
- int count = requestCounters.getOrDefault(key, 0);
- requestCounters.put(key, count+1);
- }
-
- public int requestCount(String scenario, String requestName) {
- String key = scenario + "." + requestName;
- return requestCounters.getOrDefault(key, 0);
- }
- }
-
-
- static class LogReader {
-
- private BufferedReader reader;
-
- LogReader(File file) throws FileNotFoundException {
- reader = new BufferedReader(new InputStreamReader(new FileInputStream(file), Charset.forName("utf-8")));
- }
-
- LogLine readLine() throws IOException {
- String line = reader.readLine();
- return line != null ? new LogLine(line) : null;
- }
-
- void close() throws IOException {
- reader.close();
- }
- }
-
- static class LogLine {
-
- private String rawLine;
- private Type type;
- private String scenario;
- private String userId;
- private String request;
- private long start = -1;
- private long end = -1;
- private boolean ok;
-
- LogLine(String line) {
- rawLine = line;
- }
-
- String rawLine() {
- return rawLine;
- }
-
- Type type() {
- return type != null ? type : parse().type;
- }
-
- long startTime() {
- return type != null ? start : parse().start;
- }
-
- long endTime() {
- return type != null ? end : parse().end;
- }
-
- String scenario() {
- return type != null ? scenario : parse().scenario;
- }
-
- String userId() {
- return type != null ? userId : parse().userId;
- }
-
- String request() {
- return type != null ? request : parse().request;
- }
-
- long logTime() {
- if (type == null) {
- parse();
- }
- return type == Type.RUN || type == Type.USER_START ? start : end;
- }
-
- boolean ok() {
- if (type == null) {
- parse();
- }
- return type != null ? ok : parse().ok;
- }
-
- LogLine parse() {
- String [] cols = rawLine.split("\\t");
-
- if ("RUN".equals(cols[2])) {
- type = Type.RUN;
- start = Long.parseLong(cols[3]);
- } else if ("REQUEST".equals(cols[2])) {
- type = Type.REQUEST;
- scenario = cols[0];
- userId = cols[1];
- request = cols[4];
- start = Long.parseLong(cols[5]);
- end = Long.parseLong(cols[8]);
- ok = "OK".equals(cols[9]);
- } else if ("USER".equals(cols[2])) {
- if ("START".equals(cols[3])) {
- type = Type.USER_START;
- } else if ("END".equals(cols[3])) {
- type = Type.USER_END;
- } else {
- throw new RuntimeException("Unknown log entry type: USER " + cols[3]);
- }
- scenario = cols[0];
- userId = cols[1];
- start = Long.parseLong(cols[4]);
- end = Long.parseLong(cols[5]);
- } else {
- throw new RuntimeException("Unknow log entry type: " + cols[2]);
- }
-
- return this;
- }
-
- enum Type {
- RUN,
- REQUEST,
- USER_START,
- USER_END
- }
- }
-
public static void main(String [] args) {
if (args == null || args.length == 0) {
@@ -533,11 +353,6 @@ public class LogProcessor {
}
endMillis = Long.valueOf(args[++i]);
break;
- case "--completeSessions":
- //INLAYED_INCLUDED = true;
- OUTLAYED_INCLUDED = true;
- COMPLETE_SESSIONS = true;
- break;
case "--lastRequest":
if (i == args.length - 1) {
throw new RuntimeException("Argument " + arg + " requires a LABEL");
@@ -567,11 +382,11 @@ public class LogProcessor {
if (performStat) {
Stats stats = proc.stats();
// Print out results
- System.out.println("Start time: " + stats.startTime);
- System.out.println("End time: " + stats.lastUserEnd);
- System.out.println("Duration (ms): " + (stats.lastUserEnd - stats.startTime));
- System.out.println("Ramping up completes at: " + stats.lastUserStart);
- System.out.println("Ramping down starts at: " + stats.firstUserEnd);
+ System.out.println("Start time: " + stats.getStartTime());
+ System.out.println("End time: " + stats.getLastUserEnd());
+ System.out.println("Duration (ms): " + (stats.getLastUserEnd() - stats.getStartTime()));
+ System.out.println("Ramping up completes at: " + stats.getLastUserStart());
+ System.out.println("Ramping down starts at: " + stats.getFirstUserEnd());
System.out.println();
System.out.println("HTTP Requests:");
@@ -583,7 +398,7 @@ public class LogProcessor {
System.out.println();
System.out.println("Times of completed iterations:");
-            for (Map.Entry<String, List<Long>> ent: stats.completedIterations.entrySet()) {
+            for (Map.Entry<String, List<Long>> ent: stats.getCompletedIterations().entrySet()) {
System.out.println(" " + ent.getKey() + ": " + ent.getValue());
}
}
diff --git a/testsuite/performance/tests/src/main/java/org/keycloak/performance/log/LogReader.java b/testsuite/performance/tests/src/main/java/org/keycloak/performance/log/LogReader.java
new file mode 100644
index 0000000000..7cfdbbfdd7
--- /dev/null
+++ b/testsuite/performance/tests/src/main/java/org/keycloak/performance/log/LogReader.java
@@ -0,0 +1,30 @@
+package org.keycloak.performance.log;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.nio.charset.Charset;
+
+/**
+ * @author Marko Strukelj
+ */
+class LogReader {
+
+ private BufferedReader reader;
+
+ LogReader(File file) throws FileNotFoundException {
+ reader = new BufferedReader(new InputStreamReader(new FileInputStream(file), Charset.forName("utf-8")));
+ }
+
+ LogLine readLine() throws IOException {
+ String line = reader.readLine();
+ return line != null ? new LogLine(line) : null;
+ }
+
+ void close() throws IOException {
+ reader.close();
+ }
+}
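A minimal sketch of the reader in use (same package, since `LogReader` and `LogLine` are package-private): stream a `simulation.log` and count failed requests, following the read loop already used in `LogProcessor`. The file path is a placeholder:

```java
package org.keycloak.performance.log;

import java.io.File;
import java.io.IOException;

public class LogReaderSketch {
    public static void main(String[] args) throws IOException {
        LogReader reader = new LogReader(new File("simulation.log")); // placeholder path
        int failed = 0;
        try {
            LogLine line;
            while ((line = reader.readLine()) != null) {
                if (line.type() == LogLine.Type.REQUEST && !line.ok()) {
                    failed++;
                }
            }
        } finally {
            reader.close();
        }
        System.out.println("Failed requests: " + failed);
    }
}
```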
diff --git a/testsuite/performance/tests/src/main/java/org/keycloak/performance/log/Stats.java b/testsuite/performance/tests/src/main/java/org/keycloak/performance/log/Stats.java
new file mode 100644
index 0000000000..aa5a831e3f
--- /dev/null
+++ b/testsuite/performance/tests/src/main/java/org/keycloak/performance/log/Stats.java
@@ -0,0 +1,102 @@
+package org.keycloak.performance.log;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+
+/**
+ * @author Marko Strukelj
+ */
+class Stats {
+ private long startTime;
+
+ // timestamp at which rampUp is complete
+ private long lastUserStart;
+
+ // timestamp at which first user completed the simulation
+ private long firstUserEnd;
+
+ // timestamp at which all users completed the simulation
+ private long lastUserEnd;
+
+ // timestamps of iteration completions - when all users achieved last step of the scenario - for each scenario in the log file
+    private ConcurrentHashMap<String, List<Long>> completedIterations = new ConcurrentHashMap<>();
+
+    private LinkedHashMap<String, Set<String>> scenarioRequests = new LinkedHashMap<>();
+
+    private HashMap<String, Integer> requestCounters = new HashMap<>();
+
+ public void setStartTime(long startTime) {
+ this.startTime = startTime;
+ }
+
+ public void setLastUserStart(long lastUserStart) {
+ this.lastUserStart = lastUserStart;
+ }
+
+ public void setFirstUserEnd(long firstUserEnd) {
+ this.firstUserEnd = firstUserEnd;
+ }
+
+ public long firstUserEnd() {
+ return firstUserEnd;
+ }
+
+ public void setLastUserEnd(long lastUserEnd) {
+ this.lastUserEnd = lastUserEnd;
+ }
+
+ public long getStartTime() {
+ return startTime;
+ }
+
+ public long getLastUserStart() {
+ return lastUserStart;
+ }
+
+ public long getFirstUserEnd() {
+ return firstUserEnd;
+ }
+
+ public long getLastUserEnd() {
+ return lastUserEnd;
+ }
+
+    public Map<String, List<Long>> getCompletedIterations() {
+ return completedIterations;
+ }
+
+ public void addIterationCompletedByAll(String scenario, long time) {
+ this.completedIterations.computeIfAbsent(scenario, k -> new ArrayList<>())
+ .add(time);
+ }
+
+ public void addRequest(String scenario, String request) {
+        Set<String> requests = scenarioRequests.get(scenario);
+ if (requests == null) {
+ requests = new LinkedHashSet<>();
+ scenarioRequests.put(scenario, requests);
+ }
+ requests.add(request);
+ incrementRequestCounter(scenario, request);
+ }
+
+    public Map<String, Set<String>> requestNames() {
+ return scenarioRequests;
+ }
+
+ private void incrementRequestCounter(String scenario, String requestName) {
+ String key = scenario + "." + requestName;
+ int count = requestCounters.getOrDefault(key, 0);
+ requestCounters.put(key, count + 1);
+ }
+
+ public int requestCount(String scenario, String requestName) {
+ String key = scenario + "." + requestName;
+ return requestCounters.getOrDefault(key, 0);
+ }
+}
diff --git a/testsuite/performance/tests/src/test/scala/keycloak/AdminConsoleScenarioBuilder.scala b/testsuite/performance/tests/src/test/scala/keycloak/AdminConsoleScenarioBuilder.scala
index 0bdb140dd5..1a822b16c1 100644
--- a/testsuite/performance/tests/src/test/scala/keycloak/AdminConsoleScenarioBuilder.scala
+++ b/testsuite/performance/tests/src/test/scala/keycloak/AdminConsoleScenarioBuilder.scala
@@ -53,7 +53,7 @@ class AdminConsoleScenarioBuilder {
var chainBuilder = exec(s => {
val realm = TestConfig.randomRealmsIterator().next()
- val serverUrl = TestConfig.serverUrisList.get(0)
+ val serverUrl = TestConfig.serverUrisIterator.next()
s.setAll(
"keycloakServer" -> serverUrl,
"keycloakServerUrlEncoded" -> urlencode(serverUrl),
diff --git a/testsuite/performance/tests/src/test/scala/keycloak/AdminConsoleSimulation.scala b/testsuite/performance/tests/src/test/scala/keycloak/AdminConsoleSimulation.scala
index 8ddb9a9597..ac59323533 100644
--- a/testsuite/performance/tests/src/test/scala/keycloak/AdminConsoleSimulation.scala
+++ b/testsuite/performance/tests/src/test/scala/keycloak/AdminConsoleSimulation.scala
@@ -5,22 +5,13 @@ import io.gatling.http.Predef._
import keycloak.CommonScenarioBuilder._
-import io.gatling.core.validation.Validation
import org.keycloak.performance.TestConfig
/**
* @author Marko Strukelj
*/
-class AdminConsoleSimulation extends Simulation {
-
- println()
- println("Target server: " + TestConfig.serverUrisList.get(0))
- println()
- println("Using test parameters:\n" + TestConfig.toStringCommonTestParameters);
- println()
- println("Using dataset properties:\n" + TestConfig.toStringDatasetProperties)
-
+class AdminConsoleSimulation extends CommonSimulation {
val httpProtocol = http
.baseURL("http://localhost:8080")
@@ -31,9 +22,6 @@ class AdminConsoleSimulation extends Simulation {
.acceptLanguageHeader("en-US,en;q=0.5")
.userAgentHeader("Mozilla/5.0 (Macintosh; Intel Mac OS X 10.12; rv:54.0) Gecko/20100101 Firefox/54.0")
-
-
-
val adminSession = new AdminConsoleScenarioBuilder()
.openAdminConsoleHome()
.thinkPause()
@@ -90,18 +78,12 @@ class AdminConsoleSimulation extends Simulation {
.thinkPause()
- val adminScenario = scenario("AdminConsole")
- .asLongAs(s => rampDownNotStarted(), null, TestConfig.rampDownASAP) {
- pace(TestConfig.pace)
- adminSession.chainBuilder
- }
+ val adminScenario = scenario("AdminConsole").exec(adminSession.chainBuilder)
setUp(adminScenario
- .inject(rampUsers(TestConfig.runUsers) over TestConfig.rampUpPeriod)
- .protocols(httpProtocol))
-
- after {
- filterResults(getClass)
- }
+ .inject(
+      rampUsersPerSec(0.001) to TestConfig.usersPerSec during(TestConfig.rampUpPeriod),
+ constantUsersPerSec(TestConfig.usersPerSec) during(TestConfig.warmUpPeriod + TestConfig.measurementPeriod)
+ ).protocols(httpProtocol))
}
diff --git a/testsuite/performance/tests/src/test/scala/keycloak/BasicOIDCSimulation.scala b/testsuite/performance/tests/src/test/scala/keycloak/BasicOIDCSimulation.scala
index 20e2f935d2..250ae921f3 100644
--- a/testsuite/performance/tests/src/test/scala/keycloak/BasicOIDCSimulation.scala
+++ b/testsuite/performance/tests/src/test/scala/keycloak/BasicOIDCSimulation.scala
@@ -12,18 +12,12 @@ import org.keycloak.performance.TestConfig
* @author Radim Vansa <rvansa@redhat.com>
* @author Marko Strukelj <mstrukel@redhat.com>
*/
-class BasicOIDCSimulation extends Simulation {
-
- println()
- println("Target servers: " + TestConfig.serverUrisList)
- println()
-
- println("Using test parameters:\n" + TestConfig.toStringCommonTestParameters);
- println(" refreshTokenCount: " + TestConfig.refreshTokenCount)
- println(" badLoginAttempts: " + TestConfig.badLoginAttempts)
- println()
- println("Using dataset properties:\n" + TestConfig.toStringDatasetProperties)
+class BasicOIDCSimulation extends CommonSimulation {
+ override def printSpecificTestParameters {
+ println(" refreshTokenCount: " + TestConfig.refreshTokenCount)
+ println(" badLoginAttempts: " + TestConfig.badLoginAttempts)
+ }
val httpDefault = http
.acceptHeader("application/json")
@@ -49,18 +43,15 @@ class BasicOIDCSimulation extends Simulation {
.thinkPause()
- val usersScenario = scenario("users")
- .asLongAs(s => rampDownNotStarted(), null, TestConfig.rampDownASAP) {
- pace(TestConfig.pace)
- userSession.chainBuilder
- }
+ val usersScenario = scenario("users").exec(userSession.chainBuilder)
- setUp(usersScenario
- .inject(rampUsers(TestConfig.runUsers) over TestConfig.rampUpPeriod)
- .protocols(httpDefault))
-
- after {
- filterResults(getClass)
- }
+ setUp(usersScenario.inject(
+ rampUsersPerSec(0.001) to TestConfig.usersPerSec during(TestConfig.rampUpPeriod),
+ constantUsersPerSec(TestConfig.usersPerSec) during(TestConfig.warmUpPeriod + TestConfig.measurementPeriod)
+ ).protocols(httpDefault))
+
+// after {
+// filterResults(getClass)
+// }
}
diff --git a/testsuite/performance/tests/src/test/scala/keycloak/CommonScenarioBuilder.scala b/testsuite/performance/tests/src/test/scala/keycloak/CommonScenarioBuilder.scala
index 39cc1a7261..c4b9c87dff 100644
--- a/testsuite/performance/tests/src/test/scala/keycloak/CommonScenarioBuilder.scala
+++ b/testsuite/performance/tests/src/test/scala/keycloak/CommonScenarioBuilder.scala
@@ -3,39 +3,11 @@ package keycloak
import io.gatling.core.Predef._
import io.gatling.http.Predef._
import org.keycloak.gatling.Predef._
-import keycloak.BasicOIDCScenarioBuilder._
-import org.keycloak.performance.log.LogProcessor
-
-import java.util.concurrent.atomic.AtomicInteger
-
-import io.gatling.core.pause.Normal
-import io.gatling.core.session.Session
-import io.gatling.core.structure.ChainBuilder
-import io.gatling.core.validation.Validation
-import org.jboss.perf.util.Util
-import org.jboss.perf.util.Util.randomUUID
-import org.keycloak.adapters.spi.HttpFacade.Cookie
-import org.keycloak.gatling.AuthorizeAction
-import org.keycloak.performance.TestConfig
-
/**
* @author Tomas Kyjovsky
*/
object CommonScenarioBuilder {
- def rampDownNotStarted(): Validation[Boolean] = {
- System.currentTimeMillis < TestConfig.measurementEndTime
- }
-
- def filterResults(clazz: Class[_]) = {
- if (TestConfig.filterResults) {
- new LogProcessor(clazz).filterLog(
- TestConfig.measurementStartTime,
- TestConfig.measurementEndTime,
- false, true, true)
- }
- }
-
}
diff --git a/testsuite/performance/tests/src/test/scala/keycloak/CommonSimulation.scala b/testsuite/performance/tests/src/test/scala/keycloak/CommonSimulation.scala
new file mode 100644
index 0000000000..2afa96477d
--- /dev/null
+++ b/testsuite/performance/tests/src/test/scala/keycloak/CommonSimulation.scala
@@ -0,0 +1,45 @@
+package keycloak
+
+import io.gatling.core.Predef._
+import io.gatling.http.Predef._
+import keycloak.CommonScenarioBuilder._
+import org.keycloak.performance.log.LogProcessor
+import io.gatling.core.validation.Validation
+
+import org.keycloak.performance.TestConfig
+
+
+/**
+ * @author Tomas Kyjovsky <tkyjovsk@redhat.com>
+ */
+abstract class CommonSimulation extends Simulation {
+
+ println()
+ println("Target servers: " + TestConfig.serverUrisList)
+ println()
+ println("Using test parameters:\n" + TestConfig.toStringCommonTestParameters);
+ printSpecificTestParameters
+ println()
+ println("Using dataset properties:\n" + TestConfig.toStringDatasetProperties)
+ println()
+ println("Timestamps: \n" + TestConfig.toStringTimestamps)
+ println()
+
+ def printSpecificTestParameters {
+ // override in subclass
+ }
+
+ def rampDownNotStarted(): Validation[Boolean] = {
+ System.currentTimeMillis < TestConfig.measurementEndTime
+ }
+
+ after {
+ if (TestConfig.filterResults) {
+ new LogProcessor(getClass).filterLog(
+ TestConfig.measurementStartTime,
+ TestConfig.measurementEndTime,
+ false, false)
+ }
+ }
+
+}