Skip to content

Commit

Permalink
fix(tasks): don't upload log files to CodSpeed (#2762)
Browse files Browse the repository at this point in the history
Improvement to #2751. CodSpeed has advised that there is no need to upload the log
files.
  • Loading branch information
overlookmotel committed Mar 18, 2024
1 parent 5edd196 commit b82a5dd
Show file tree
Hide file tree
Showing 2 changed files with 65 additions and 69 deletions.
65 changes: 41 additions & 24 deletions tasks/benchmark/codspeed/capture.mjs
Expand Up @@ -9,47 +9,64 @@
* then shuts itself down.
*/

import fs from 'fs';
import {createWriteStream} from 'fs';
import fs from 'fs/promises';
import {join as pathJoin} from 'path';
import {pipeline} from 'stream/promises';
import express from 'express';
import tar from 'tar';

const DEFAULT_PORT = 3000,
LISTEN_ATTEMPTS = 10;

// Create directory for saving assets
const rand = Math.round(Math.random() * 1000000000000000000).toString(16),
dataDir = `/tmp/oxc_bench_data_${rand}`;
fs.mkdirSync(dataDir);
await fs.mkdir(dataDir);

// Name used to prefix saved asset files; the fixture name is appended
// when a specific fixture is being benchmarked
let component = process.env.COMPONENT;
if (process.env.FIXTURE) component += process.env.FIXTURE;

const app = express();

// Intercepted "upload" endpoint: save the metadata JSON body to disk, then
// reply with a local URL so the archive upload is also sent to this server.
app.post('/upload', (req, res, next) => {
saveBody(req, 'metadata.json', next, () => {
// NOTE(review): response shape presumably mimics CodSpeed's real API
// (`runId` is a dummy); only `uploadUrl` is meaningful here — confirm.
res.json({
status: 'success',
uploadUrl: `http://localhost:${port}/upload_archive`,
runId: 'dummy_value',
});
});
});
// Wrap an async Express handler so a rejected promise is forwarded to
// `next` (Express 4 does not catch async handler rejections itself).
const wrapHandler = fn => (req, res, next) => { fn(req, res).catch(next); };
// Path for saving an asset file, prefixed with the component name.
// (Garbled `$(unknown)` in the scrape restored to `${filename}`.)
const getFilePath = filename => pathJoin(dataDir, `${component}_${filename}`);

app.put('/upload_archive', (req, res, next) => {
saveBody(req, 'archive.tar.gz', next, () => {
res.send('OK');
server.close(() => {});
// Intercepted "upload" endpoint: persist the metadata JSON request body,
// then point the uploader at this server's local `/upload_archive` route.
app.post('/upload', wrapHandler(async (req, res) => {
  await pipeline(req, createWriteStream(getFilePath('metadata.json')));

  res.json({
    status: 'success',
    uploadUrl: `http://localhost:${port}/upload_archive`,
    runId: 'dummy_value',
  });
}));

/**
 * Stream a request body to a file in `dataDir`, named `<component>_<filename>`.
 * (Garbled `$(unknown)` in the scrape restored to `${filename}`.)
 * @param {import('http').IncomingMessage} req - Request whose body to save
 * @param {string} filename - Suffix for the output file name
 * @param {Function} onError - Called with the error if streaming fails
 * @param {Function} done - Called once the body is fully written
 */
function saveBody(req, filename, onError, done) {
(async () => {
const stream = fs.createWriteStream(`${dataDir}/${component}_${filename}`);
await pipeline(req, stream);
done();
})().catch(onError);
}
// Intercepted archive endpoint: receive the tarball, unpack it, tidy the
// extracted files, then shut the server down — its job is done.
app.put('/upload_archive', wrapHandler(async (req, res) => {
  // Stream the uploaded tarball to disk
  const tarballPath = getFilePath('archive.tar.gz');
  await pipeline(req, createWriteStream(tarballPath));

  // Unpack its contents into the data dir, then discard the tarball itself
  await tar.extract({file: tarballPath, cwd: dataDir});
  await fs.rm(tarballPath);

  // Prefix extracted `.out` files with the component name;
  // `.log` files are not needed, so delete them
  for (const name of await fs.readdir(dataDir)) {
    const entryPath = pathJoin(dataDir, name);
    if (name.endsWith('.log')) {
      await fs.rm(entryPath);
    } else if (name.endsWith('.out')) {
      await fs.rename(entryPath, getFilePath(name));
    }
  }

  res.send('');
  server.close(() => {});
}));

// Open server on a port which is not already in use
let server,
Expand All @@ -71,4 +88,4 @@ for (let i = 0; i < LISTEN_ATTEMPTS; i++) {
console.log(`Server listening on port ${port}`);

// Output data dir path + port to env vars
fs.appendFileSync(process.env.GITHUB_ENV, `DATA_DIR=${dataDir}\nINTERCEPT_PORT=${port}\n`);
await fs.appendFile(process.env.GITHUB_ENV, `DATA_DIR=${dataDir}\nINTERCEPT_PORT=${port}\n`);
69 changes: 24 additions & 45 deletions tasks/benchmark/codspeed/upload.mjs
Expand Up @@ -10,29 +10,26 @@ import assert from 'assert';
import tar from 'tar';
import axios from 'axios';

const METADATA_SUFFIX = 'metadata.json',
ARCHIVE_SUFFIX = `archive.tar.gz`,
const METADATA_SUFFIX = '_metadata.json',
CODSPEED_UPLOAD_URL = 'https://api.codspeed.io/upload';

// Workflow inputs: directory containing captured benchmark data, and the
// CodSpeed API token (may be absent — tokenless mode is handled below)
const dataDir = process.env.DATA_DIR,
token = process.env.CODSPEED_TOKEN;

// Get list of components
const components = (await fs.readdir(dataDir))
.filter(filename => filename.endsWith(METADATA_SUFFIX))
.map(filename => filename.slice(0, -METADATA_SUFFIX.length - 1));

// Unzip tarballs
const unzipDir = pathJoin(dataDir, 'unzip');
await fs.mkdir(unzipDir);

for (const component of components) {
console.log(`Unzipping profile data: ${component}`);
const archivePath = pathJoin(dataDir, `${component}_${ARCHIVE_SUFFIX}`);
const componentUnzipDir = pathJoin(unzipDir, component);
await fs.mkdir(componentUnzipDir);
await tar.extract({file: archivePath, cwd: componentUnzipDir});
await fs.rm(archivePath);
// Find profile files (`<component>_<pid>.out`) and the first metadata file.
// Any other file in the data dir is unexpected and aborts the upload.
// (Garbled `$(unknown)` in the scrape restored to `${filename}`.)
const profileFiles = [];
let metadataPath;
for (const filename of await fs.readdir(dataDir)) {
  const path = pathJoin(dataDir, filename);
  if (filename.endsWith(METADATA_SUFFIX)) {
    // Keep only the first metadata file found
    if (!metadataPath) metadataPath = path;
  } else {
    const match = filename.match(/_(\d+)\.out$/);
    assert(match, `Unexpected file: ${filename}`);

    const pid = +match[1];
    profileFiles.push({pid, path});
  }
}

// Move all `.out` files to one directory
Expand All @@ -44,25 +41,14 @@ await fs.mkdir(outDir);
const pids = new Set(),
duplicates = [];
let highestPid = -1;
for (const component of components) {
const componentDir = pathJoin(unzipDir, component);
const outFiles = await fs.readdir(componentDir);
for (const filename of outFiles) {
if (!filename.endsWith('.out')) continue;
let pid = filename.slice(0, -4);
assert(/^\d+$/.test(pid), `Unexpected file: ${component}/${filename}`);
pid *= 1;

const path = pathJoin(componentDir, filename);
if (pids.has(pid)) {
// Duplicate PID
duplicates.push({pid, path});
} else {
pids.add(pid);
if (pid > highestPid) highestPid = pid;

await fs.rename(path, pathJoin(outDir, `${pid}.out`));
}
for (const {pid, path} of profileFiles) {
  if (!pids.has(pid)) {
    // First file seen for this PID — move it into the combined output dir
    pids.add(pid);
    highestPid = Math.max(highestPid, pid);
    await fs.rename(path, pathJoin(outDir, `${pid}.out`));
  } else {
    // Duplicate PID — set aside to resolve after this pass
    duplicates.push({pid, path});
  }
}

Expand All @@ -83,11 +69,6 @@ for (let {pid, path} of duplicates) {
await fs.rm(path);
}

// Add log files to output dir
for (const filename of ['runner.log', 'valgrind.log']) {
await fs.rename(pathJoin(unzipDir, components[0], filename), pathJoin(outDir, filename));
}

// ZIP combined profile directory
console.log('Zipping combined profile directory');
const archivePath = pathJoin(dataDir, 'archive.tar.gz');
Expand All @@ -105,9 +86,7 @@ for await (const chunk of inputStream) {
const md5 = hash.digest('base64');

// Alter MD5 hash in metadata object
const metadata = JSON.parse(
await fs.readFile(pathJoin(dataDir, `${components[0]}_${METADATA_SUFFIX}`), 'utf8')
);
const metadata = JSON.parse(await fs.readFile(metadataPath, 'utf8'));
metadata.profileMd5 = md5;

// If no token, set `metadata.tokenless`, and log hash of metadata JSON.
Expand Down

0 comments on commit b82a5dd

Please sign in to comment.