Skip to content

Commit

Permalink
Added query-runner
Browse files Browse the repository at this point in the history
  • Loading branch information
allouis committed Dec 19, 2024
1 parent 6b6d4c3 commit 0ba8fc3
Show file tree
Hide file tree
Showing 11 changed files with 820 additions and 0 deletions.
22 changes: 22 additions & 0 deletions cedar/query-runner/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
# Pinned Node LTS; forced to amd64 so the image matches the deploy target
# regardless of the build host's architecture (e.g. Apple Silicon).
FROM --platform=linux/amd64 node:20.18.0-alpine

WORKDIR /opt/query-runner

# Copy only the manifests first so the dependency-install layer stays
# cached until package.json or yarn.lock actually change.
COPY package.json .
COPY yarn.lock .

# --frozen-lockfile makes the build fail instead of silently rewriting
# yarn.lock when it drifts from package.json (reproducible installs).
RUN yarn --frozen-lockfile && \
    yarn cache clean

COPY tsconfig.json .

# Benchmark fixtures read at runtime by the app.
COPY queries ./queries

COPY src ./src

ENV NODE_ENV=production
RUN yarn build

EXPOSE 8080

CMD ["node", "dist/app.js"]
17 changes: 17 additions & 0 deletions cedar/query-runner/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
# build

```
docker build . -t query-runner:latest
```

# run

```
docker run --rm -e MYSQL_HOST=<ip> -e MYSQL_USER=<user> -e MYSQL_PASSWORD=<pass> -e MYSQL_DATABASE=activitypub_061224 query-runner
```

# build & run

```
docker build . -t query-runner:latest && docker run --rm -e MYSQL_HOST=<ip> -e MYSQL_USER=<user> -e MYSQL_PASSWORD=<pass> -e MYSQL_DATABASE=activitypub_061224 query-runner
```
24 changes: 24 additions & 0 deletions cedar/query-runner/package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
{
"name": "query-runner",
"private": true,
"version": "0.0.0",
"author": "Ghost Foundation",
"license": "UNLICENSED",
"main": "src/app.ts",
"type": "module",
"scripts": {
"build": "esbuild src/app.ts --sourcemap --platform=neutral --packages=external --bundle --outfile=dist/app.js",
"build:watch": "concurrently \"yarn build --watch\" \"node --watch dist/app.js\""
},
"files": ["src"],
"devDependencies": {
"concurrently": "9.1.0",
"esbuild": "0.24.0",
"typescript": "5.7.2"
},
"dependencies": {
"bottleneck": "2.19.5",
"mysql2": "3.11.5",
"percentile": "1.6.0"
}
}
25 changes: 25 additions & 0 deletions cedar/query-runner/queries/read-feed.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
-- Read one page (50 rows, newest first) of the feed for user 189856,
-- with author details plus two optional flags:
--   liked_by_user_id    - non-NULL when the feed owner liked the post
--   followed_by_user_id - non-NULL when a follow row links owner/author
SELECT
posts.title AS post_title,
posts.content AS post_content,
posts.type AS post_type,
accounts.name AS author_name,
accounts.username AS author_username,
likes.user_id AS liked_by_user_id,
follows.follower_id AS followed_by_user_id
FROM
feeds
-- Post backing each feed entry.
INNER JOIN posts
ON feeds.post_id = posts.internal_id
-- Author of that post.
INNER JOIN accounts
ON posts.author_id = accounts.internal_id
-- Did the feed owner like this post?
LEFT JOIN likes
ON likes.post_id = posts.internal_id
AND likes.user_id = feeds.user_id
-- NOTE(review): this matches rows where the AUTHOR follows the feed owner
-- (follower_id = author, following_id = owner). write-post.sql fans posts
-- out using follower_id = reader / following_id = author, so if the intent
-- here is "does the feed owner follow the author", the two sides appear
-- swapped — confirm against the schema before relying on this flag.
LEFT JOIN follows
ON follows.following_id = feeds.user_id
AND follows.follower_id = feeds.author_id
WHERE
feeds.user_id = 189856
ORDER BY feeds.internal_id DESC
LIMIT 50
OFFSET 0;
14 changes: 14 additions & 0 deletions cedar/query-runner/queries/read-followers.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
-- List one page (50 rows, most recent follow first) of the accounts that
-- follow account 1: each row is the follower's public profile fields.
SELECT
accounts.name AS follower_name,
accounts.username AS follower_username,
accounts.description AS follower_description,
accounts.icon AS follower_icon
FROM
follows
-- follower_id is the account doing the following; join to its profile.
INNER JOIN accounts ON follows.follower_id = accounts.internal_id
WHERE
follows.following_id = 1
ORDER BY
-- internal_id as a proxy for follow recency (monotonic insert order).
follows.internal_id DESC
LIMIT 50
OFFSET 0;
14 changes: 14 additions & 0 deletions cedar/query-runner/queries/read-following.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
-- List one page (50 rows, most recent first) of the accounts that
-- account 1 follows: each row is the followed account's profile fields.
-- Mirror image of read-followers.sql (join on following_id instead).
SELECT
accounts.name AS following_name,
accounts.username AS following_username,
accounts.description AS following_description,
accounts.icon AS following_icon
FROM
follows
-- following_id is the account being followed; join to its profile.
INNER JOIN accounts ON follows.following_id = accounts.internal_id
WHERE
follows.follower_id = 1
ORDER BY
follows.internal_id DESC
LIMIT 50
OFFSET 0;
13 changes: 13 additions & 0 deletions cedar/query-runner/queries/read-outbox.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
-- Read one page (50 rows, newest first) of account 1's outbox:
-- posts authored by that account, filtered to a single post type.
-- NOTE(review): the meaning of type = 2 is not visible from this file —
-- presumably a post-kind discriminator; confirm against the schema.
SELECT
posts.title AS post_title,
posts.content AS post_content,
posts.type AS post_type
FROM
posts
WHERE
posts.author_id = 1
AND posts.type = 2
ORDER BY
posts.internal_id DESC
LIMIT 50
OFFSET 0;
18 changes: 18 additions & 0 deletions cedar/query-runner/queries/write-post.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
-- Benchmark for the write path: create a post by account 456, then fan it
-- out into the feed of every user that follows account 456, all in one
-- transaction.
BEGIN;

INSERT INTO posts (title, content, author_id, `type`) VALUES ('Inserted post', 'Hello, world!', 456, 1);

-- Fan the new post out to the author's followers.
INSERT INTO feeds (user_id, post_id, author_id, `type`)
SELECT
users.internal_id AS user_id,
-- Id of the post inserted above (same connection/transaction).
LAST_INSERT_ID() AS post_id,
-- Must match the author of the inserted post. The original hard-coded
-- 123 here, which tagged every fan-out row with the wrong author.
456 AS author_id,
1 AS type
FROM
follows
JOIN users
ON follows.follower_id = users.account_id
WHERE
follows.following_id = 456;

COMMIT;
141 changes: 141 additions & 0 deletions cedar/query-runner/src/app.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,141 @@
import fs from 'node:fs/promises';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import Bottleneck from 'bottleneck';
import mysql from 'mysql2/promise';
import percentile from 'percentile';

// Benchmark knobs, all overridable via environment variables.
// Explicit radix 10 so values are never mis-parsed (best practice for parseInt).
// Number of back-to-back executions used for the serial (S) percentiles.
const SERIES_RUNS = Number.parseInt(process.env.SERIES_RUNS || '5', 10);
// How long (seconds) to sustain the rate-limited parallel phase.
const PARALLEL_RUN_DURATION = Number.parseInt(
    process.env.PARALLEL_RUN_DURATION || '10', // seconds
    10,
);
// Target request rate for the parallel (P) phase.
// NOTE(review): the constant is PARALLEL_QUERIES_PER_SECOND but the env var
// is QUERIES_PER_SECOND — kept as-is for compatibility; confirm the name
// mismatch is intentional.
const PARALLEL_QUERIES_PER_SECOND = Number.parseInt(
    process.env.QUERIES_PER_SECOND || '10',
    10,
);
// Total number of queries issued during the parallel phase.
const PARALLEL_RUNS = PARALLEL_QUERIES_PER_SECOND * PARALLEL_RUN_DURATION;

// Emulate CommonJS __filename/__dirname in an ES module so the queries/
// directory can be resolved relative to this source file.
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

// Shared connection pool for every benchmark run; credentials come from
// the environment (see README). multipleStatements lets multi-statement
// fixtures such as write-post.sql (BEGIN; ...; COMMIT;) be sent as one
// query string. namedPlaceholders enables :name-style params, though the
// current params map only uses positional arrays.
const pool = mysql.createPool({
host: process.env.MYSQL_HOST,
user: process.env.MYSQL_USER,
password: process.env.MYSQL_PASSWORD,
database: process.env.MYSQL_DATABASE,
namedPlaceholders: true,
multipleStatements: true,
});

// Rate limiter that spaces parallel-phase queries out to the configured
// queries-per-second target.
const limiter = new Bottleneck({
minTime: 1000 / PARALLEL_QUERIES_PER_SECOND,
});

// Fire a burst of trivial queries so the pool opens its connections
// before any timings are recorded (keeps connection-setup cost out of
// the measured runs).
const warmupPool = async () => {
    console.time('Warmup');
    const pings = Array.from({ length: 100 }, () => pool.query('SELECT 1'));
    await Promise.all(pings);
    console.timeEnd('Warmup');
};

// Execute one query with the given positional args and return the elapsed
// wall-clock time in milliseconds (includes pool checkout + round trip).
// Cleanup: removed a leftover commented-out signature fragment and widened
// args from any[] to the type-safe unknown[].
const timeQuery = async (query: string, args: unknown[]): Promise<number> => {
    const start = performance.now();
    await pool.query(query, args);
    return performance.now() - start;
};

// Benchmark a query two ways:
//  - serially: SERIES_RUNS back-to-back executions (isolated latency);
//  - in parallel: PARALLEL_RUNS executions paced by the Bottleneck
//    limiter at PARALLEL_QUERIES_PER_SECOND (latency under load).
// Returns the raw timing arrays for percentile reporting.
// Cleanup: removed leftover commented-out fragments and the pointless
// Infinity-prefilled arrays; widened args from any[] to unknown[].
const runQuery = async (query: string, args: unknown[]) => {
    const runTimes: number[] = [];
    for (let run = 0; run < SERIES_RUNS; run++) {
        runTimes.push(await timeQuery(query, args));
    }

    // Schedule everything up front; Bottleneck spaces the actual
    // executions out at the configured rate.
    const parallelRuns: Promise<number>[] = Array.from(
        { length: PARALLEL_RUNS },
        () => limiter.schedule(() => timeQuery(query, args)),
    );
    const parallelRunTimes = await Promise.all(parallelRuns);

    return {
        runTimes,
        parallelRunTimes,
    };
};

// Load every .sql file from the sibling queries/ directory and return a
// map of query name (file basename without extension) -> SQL text.
const loadQueries = async () => {
    const queriesDir = path.join(__dirname, '../queries');
    const entries = await fs.readdir(queriesDir);

    const queries: { [key: string]: string } = {};
    for (const fileName of entries) {
        // Skip anything that is not a SQL fixture.
        if (!fileName.endsWith('.sql')) {
            continue;
        }
        const name = path.basename(fileName, '.sql');
        queries[name] = await fs.readFile(
            path.join(queriesDir, fileName),
            'utf-8',
        );
    }
    return queries;
};

// Positional arguments for queries that need them, keyed by the query
// file's basename (without .sql). Queries absent from this map run with
// an empty argument list. (namedPlaceholders is enabled on the pool, so
// entries could alternatively be objects, e.g. { user_id: '189856' }.)
const params = new Map<string, number[]>([['read-feed', [189856]]]);

// Top-level await (ES module): load every .sql fixture before benchmarking.
const queries = await loadQueries();

// Accumulates the raw timings per query for the JSON dump at the end.
const queryResults: Record<
string,
{ runTimes: number[]; parallelRunTimes: number[] }
> = {};

// Open pool connections up front so the first measured run is not paying
// connection-setup cost.
await warmupPool();

// Run every loaded query (optionally restricted to a single one via the
// QUERY env var) and print serial (S) and parallel (P) latency percentiles.
for (const queryName in queries) {
if (process.env.QUERY && process.env.QUERY !== queryName) {
continue;
}
const query = queries[queryName];
// Fall back to an empty arg list for queries with no params entry.
const results = await runQuery(query, params.get(queryName) || []);
queryResults[queryName] = results;

console.log('\n');

// calculate P50, P90, P99, P100
const percentiles = [50, 90, 99, 100];
// S: serial run percentiles.
console.log(
`${queryName} - S: ${percentiles
.map((p) => percentile(p, results.runTimes) as number)
.map((r, index) => `P${percentiles[index]}: ${r.toFixed(2)}ms`)
.join(', ')}`,
);
// P: rate-limited parallel run percentiles.
console.log(
`${queryName} - P: ${percentiles
.map((p) => percentile(p, results.parallelRunTimes) as number)
.map((r, index) => `P${percentiles[index]}: ${r.toFixed(2)}ms`)
.join(', ')}`,
);

console.log('\n');
}

// Dump the raw timings as JSON for piping into further analysis.
console.log(JSON.stringify(queryResults));

// Close all pooled connections so the process can exit cleanly.
await pool.end();
Loading

0 comments on commit 0ba8fc3

Please sign in to comment.