/*
 * Main.js
 * 229 lines (207 loc) · 8.31 KB
 */
var fs = require('fs');
var os = require('os');
var Config = require("./Config");
var Ingestion = new (require("./Ingestion"))();
var TestSequence = require("./TestSequence.json");
var Utils = require("./Utils");
var AppConnector = require("./AppConnector");
var successfulRun = true;
let startTime = null;
let perfMode = process.argv.indexOf("-perfmode") !== -1;
const skipAsyncHooksTests = () => {
var version = process.versions.node.split(".");
if (version[0] == 8 && version[1] == 0 && version[2] == 0) return true;
return version[0] < 8;
}
// Helpers
const runTestSequence = (index) => {
const testIndex = index || 0;
if (testIndex == 0) {
Utils.Logging.enterSubunit("Triggering runs for each test sequence");
}
if (TestSequence.length == testIndex) {
const waitTime = Config.WaitTime;
Utils.Logging.exitSubunit();
Utils.Logging.info("Waiting " + waitTime + "ms for telemetry...");
return new Promise((resolve, reject) => setTimeout(resolve, waitTime));
} else if (skipAsyncHooksTests() && TestSequence[testIndex].path.indexOf("/~") === 0) {
console.log("Skipping test for", TestSequence[testIndex].path)
return runTestSequence(testIndex + 1);
} else {
// Underscores indicate internal sequences
if (TestSequence[testIndex].path.indexOf("/_") === 0) {
return runTestSequence(testIndex + 1);
}
return AppConnector.runTest(TestSequence[testIndex].path).then(() => runTestSequence(testIndex + 1));
}
};
const validateTestSequence = (index) => {
const testIndex = index || 0;
if (testIndex == 0) {
Utils.Logging.enterSubunit("Validating telemetry reported for all steps of each test sequence");
}
if (TestSequence.length == testIndex) {
Utils.Logging.exitSubunit();
return Promise.resolve(true);
} else if (skipAsyncHooksTests() && TestSequence[testIndex].path.indexOf("/~") === 0) {
console.log("Skipping validation for", TestSequence[testIndex].path)
return validateTestSequence(testIndex + 1);
} else {
// Underscores indicate internal sequences
if (TestSequence[testIndex].path.indexOf("/_") === 0) {
return validateTestSequence(testIndex + 1);
}
return Ingestion.testValidator.validateTest(TestSequence[testIndex]).then((success)=>{
if (!success) {
successfulRun = false;
}
return validateTestSequence(testIndex + 1);
});
}
};
const runAndValidateLongTest = () => {
Utils.Logging.enterSubunit("Performing parallel requests test sequence for " + Config.StressTestTime + "ms");
// Find stress test
let testSequence = null;
for (let i = 0; i < TestSequence.length; i++) {
let sequence = TestSequence[i];
if (sequence.path === "/_longRunTest") {
testSequence = sequence;
break;
}
}
// Don't continue if we don't have one
if (!testSequence) {
Utils.Logging.info("No parallel test sequence defined. Skipping.");
Utils.Logging.exitSubunit();
return Promise.resolve(false);
} else if (!successfulRun) {
Utils.Logging.info("Standard tests failed. Skipping.");
Utils.Logging.exitSubunit();
return Promise.resolve(false);
}
const startStressTime = new Date();
let attemptCounter = 0;
const stressLoop = () => {
const elapsed = new Date() - startStressTime;
if (elapsed > Config.StressTestTime) {
return Promise.resolve(true);
}
attemptCounter++;
return AppConnector.runTest(testSequence.path, true).then(stressLoop);
};
const waitForTelemetry = () => {
const waitTime = Config.WaitTime;
Utils.Logging.info("Waiting " + waitTime + "ms for telemetry...");
return new Promise((resolve, reject) => setTimeout(resolve, waitTime));
}
// Run 10 stress loops in parallel
// This is for higher load and to test correlation under non-sequential tasks
const stressLoops = [stressLoop(),stressLoop(),stressLoop(),stressLoop(),stressLoop(),
stressLoop(),stressLoop(),stressLoop(),stressLoop(),stressLoop()];
return Promise.all(stressLoops).then((success) => {
if (!success) {
throw new Error("Could not complete stress test!");
return Promise.resolve(false);
}
}).then(waitForTelemetry).then(()=>{
Utils.Logging.enterSubunit("Expecting " + attemptCounter + " requests and all associated nested telemetry");
// Get all request item operation ids
const stressTelemetry = Ingestion.telemetry["RequestData"].filter((v) => {
return v.data.baseData.name == "GET " + testSequence.path;
});
const distinctOpIds = Array.from(new Set(stressTelemetry.map(item => item.tags["ai.operation.id"])));
// Validate number of operations
if (distinctOpIds.length != attemptCounter) {
Utils.Logging.error("FAILED EXPECTATION - " + distinctOpIds.length + " distinct operations instead of " + attemptCounter + "!");
successfulRun = false;
Utils.Logging.exitSubunit();
Utils.Logging.exitSubunit();
return Promise.resolve(false);
}
const validationLoop = (i) => {
if (i >= distinctOpIds.length) {
Utils.Logging.success("Test PASSED!");
return true;
}
return Ingestion.testValidator.validateTest(testSequence, distinctOpIds[i], true).then(res => {
if (!res) {
successfulRun = false;
Utils.Logging.error("Test FAILED on item "+i+"!");
return false;
} else {
return validationLoop(i + 1);
}
});
}
return validationLoop(0).then(()=>{
Utils.Logging.exitSubunit();
Utils.Logging.exitSubunit();
});
});
}
const validatePerfCounters = () => {
const perfTypes = ["\\Processor(_Total)\\% Processor Time",
"\\Process(??APP_WIN32_PROC??)\\% Processor Time",
"\\Process(??APP_WIN32_PROC??)\\Private Bytes",
"\\Memory\\Available Bytes",
"\\ASP.NET Applications(??APP_W3SVC_PROC??)\\Requests/Sec",
"\\ASP.NET Applications(??APP_W3SVC_PROC??)\\Request Execution Time"
];
const expectedEach = Math.floor((new Date() - startTime) / Config.PerfCounterFrequency);
return Ingestion.testValidator.validatePerfCounters(perfTypes, expectedEach).then((success) => {
if (!success) {
successfulRun = false;
}
return success;
});
};
const getCPU = () => {
const cpus = os.cpus();
let totalIdle = 0, totalTick = 0;
for(let i = 0, len = cpus.length; i < len; i++) {
let cpu = cpus[i];
for(let type in cpu.times) {
totalTick += cpu.times[type];
}
totalIdle += cpu.times.idle;
}
return {idle: totalIdle / cpus.length, total: totalTick / cpus.length};
}
// Main runner
Ingestion.enable();
if (!perfMode) {
AppConnector.startConnection(TestSequence)
.then(() => { startTime = new Date(); })
.then(runTestSequence)
.then(validateTestSequence)
.then(runAndValidateLongTest)
.then(validatePerfCounters)
.then(AppConnector.closeConnection)
.then(() => {
Ingestion.disable();
Utils.Logging.info("Test run done!");
if (successfulRun) {
Utils.Logging.success("All tests PASSED!");
} else {
Utils.Logging.error("At least one test FAILED!");
}
process.exit(successfulRun ? 0: 1);
})
.catch((e) => {
Utils.Logging.error("Error thrown!");
Utils.Logging.error(e.stack || e);
process.exit(1);
});
} else {
AppConnector.startConnection(TestSequence);
let lastCPU = getCPU();
setInterval(()=>{
let newCPU = getCPU();
let idleDifference = newCPU.idle - lastCPU.idle;
let totalDifference = newCPU.total - lastCPU.total;
Utils.Logging.info("Telemetry Items: " + Ingestion.telemetryCount);
Utils.Logging.info("CPU Usage: " + (100 - ~~(100 * idleDifference / totalDifference)) + "%");
lastCPU = newCPU;
}, 30 * 1000);
}