From f392bcd9885d52c39fb6de2efdac4de03171aeb3 Mon Sep 17 00:00:00 2001 From: Lev Kokotov Date: Fri, 16 Jan 2026 08:50:38 -0800 Subject: [PATCH] Race condition search --- integration/fast_write_read/.gitignore | 2 + integration/fast_write_read/README.md | 6 ++ integration/fast_write_read/index.js | 120 +++++++++++++++++++++++ integration/fast_write_read/package.json | 13 +++ integration/fast_write_read/pgdog.toml | 6 ++ integration/fast_write_read/users.toml | 4 + 6 files changed, 151 insertions(+) create mode 100644 integration/fast_write_read/.gitignore create mode 100644 integration/fast_write_read/README.md create mode 100644 integration/fast_write_read/index.js create mode 100644 integration/fast_write_read/package.json create mode 100644 integration/fast_write_read/pgdog.toml create mode 100644 integration/fast_write_read/users.toml diff --git a/integration/fast_write_read/.gitignore b/integration/fast_write_read/.gitignore new file mode 100644 index 00000000..504afef8 --- /dev/null +++ b/integration/fast_write_read/.gitignore @@ -0,0 +1,2 @@ +node_modules/ +package-lock.json diff --git a/integration/fast_write_read/README.md b/integration/fast_write_read/README.md new file mode 100644 index 00000000..ca4c8b6a --- /dev/null +++ b/integration/fast_write_read/README.md @@ -0,0 +1,6 @@ +# Race condition search + +1. `cd` into this directory. +2. In psql, run `CREATE DATABASE pgdog`, `CREATE USER pgdog PASSWORD 'pgdog' LOGIN` (if not exists already). +3. `cargo run --release --bin pgdog` (release is faster, helps find race conditions). +4. In a different terminal, run `npm start`. 
diff --git a/integration/fast_write_read/index.js b/integration/fast_write_read/index.js new file mode 100644 index 00000000..6119e3de --- /dev/null +++ b/integration/fast_write_read/index.js @@ -0,0 +1,120 @@ +import { Sequelize, DataTypes } from "sequelize"; + +const sequelize = new Sequelize("postgres://pgdog:pgdog@127.0.0.1:6432/pgdog", { + logging: false, + disableClsTransactions: true, + dialectOptions: { + autocommit: true, + }, + pool: { + max: 20, + min: 20, + acquire: 60000, + idle: 10000, + }, +}); + +const Author = sequelize.define("Author", { + id: { + type: DataTypes.INTEGER, + primaryKey: true, + autoIncrement: true, + }, + name: { + type: DataTypes.STRING, + allowNull: false, + }, +}); + +const Book = sequelize.define("Book", { + id: { + type: DataTypes.INTEGER, + primaryKey: true, + autoIncrement: true, + }, + title: { + type: DataTypes.STRING, + allowNull: false, + }, + authorId: { + type: DataTypes.INTEGER, + allowNull: false, + references: { + model: Author, + key: "id", + }, + }, +}); + +Author.hasMany(Book, { foreignKey: "authorId" }); +Book.belongsTo(Author, { foreignKey: "authorId" }); + +async function insertAuthorAndBook(i) { + const author = await Author.create({ name: `Author ${i}` }); + const book = await Book.create({ title: `Book ${i}`, authorId: author.id }); + return { author, book }; +} + +async function main() { + await sequelize.sync({ force: true }); + + const iterations = 500000; + const batchSize = 1000; + + console.log(`Starting ${iterations} insert pairs in batches of ${batchSize} (pool: 20)`); + + let completed = 0; + let errors = 0; + const errorDetails = {}; + const start = Date.now(); + + for (let batch = 0; batch < iterations; batch += batchSize) { + const batchEnd = Math.min(batch + batchSize, iterations); + const promises = []; + + for (let i = batch; i < batchEnd; i++) { + promises.push( + insertAuthorAndBook(i) + .then(() => { + completed++; + }) + .catch((err) => { + errors++; + const key = err.message.substring(0, 
60); + errorDetails[key] = (errorDetails[key] || 0) + 1; + if (errors <= 10) { + console.error(`Error on iteration ${i}: ${err.message}`); + } + }), + ); + } + + await Promise.allSettled(promises); + + const elapsed = ((Date.now() - start) / 1000).toFixed(1); + const rate = (completed / (Date.now() - start) * 1000).toFixed(0); + console.log(`Batch ${batch / batchSize + 1}: ${completed}/${iterations} (${elapsed}s, ${rate}/s, errors: ${errors})`); + } + + const elapsed = ((Date.now() - start) / 1000).toFixed(1); + console.log(`\nFinished in ${elapsed}s`); + console.log(`Results: Completed: ${completed}, Errors: ${errors}`); + + if (Object.keys(errorDetails).length > 0) { + console.log("Error breakdown:"); + for (const [msg, count] of Object.entries(errorDetails)) { + console.log(` ${msg}...: ${count}`); + } + } + + const authorCount = await Author.count(); + const bookCount = await Book.count(); + console.log(`Authors: ${authorCount}, Books: ${bookCount}`); + + await sequelize.close(); +} + +main().catch((err) => { + console.error(err); + process.exit(1); +}); diff --git a/integration/fast_write_read/package.json b/integration/fast_write_read/package.json new file mode 100644 index 00000000..bee2d105 --- /dev/null +++ b/integration/fast_write_read/package.json @@ -0,0 +1,13 @@ +{ + "name": "fast_write_read", + "version": "1.0.0", + "type": "module", + "main": "index.js", + "scripts": { + "start": "node index.js" + }, + "dependencies": { + "pg": "^8.14.1", + "sequelize": "^6.37.5" + } +} diff --git a/integration/fast_write_read/pgdog.toml b/integration/fast_write_read/pgdog.toml new file mode 100644 index 00000000..48a87f5d --- /dev/null +++ b/integration/fast_write_read/pgdog.toml @@ -0,0 +1,6 @@ +[[databases]] +name = "pgdog" +host = "127.0.0.1" + +[admin] +password = "pgdog" diff --git a/integration/fast_write_read/users.toml b/integration/fast_write_read/users.toml new file mode 100644 index 00000000..581cdb75 --- /dev/null +++ 
b/integration/fast_write_read/users.toml @@ -0,0 +1,4 @@ +[[users]] +name = "pgdog" +database = "pgdog" +password = "pgdog"