From e5d46749c0e41498da8451622dd3ed2218f717bd Mon Sep 17 00:00:00 2001 From: "Brian M. Carlson" Date: Wed, 18 Dec 2019 23:44:43 -0600 Subject: [PATCH 01/10] Work in progress...convert to more efficient reader --- .gitignore | 1 + packages/pg-packet-stream/package.json | 22 + packages/pg-packet-stream/src/index.test.ts | 103 +++ packages/pg-packet-stream/src/index.ts | 177 +++++ .../pg-packet-stream/src/types/chunky.d.ts | 1 + packages/pg-packet-stream/tsconfig.json | 23 + packages/pg/bench.js | 42 ++ packages/pg/lib/client.js | 7 +- packages/pg/lib/connection-fast.js | 709 ++++++++++++++++++ packages/pg/lib/result.js | 24 +- packages/pg/package.json | 1 + .../pg/test/integration/client/api-tests.js | 93 +-- yarn.lock | 107 ++- 13 files changed, 1249 insertions(+), 61 deletions(-) create mode 100644 packages/pg-packet-stream/package.json create mode 100644 packages/pg-packet-stream/src/index.test.ts create mode 100644 packages/pg-packet-stream/src/index.ts create mode 100644 packages/pg-packet-stream/src/types/chunky.d.ts create mode 100644 packages/pg-packet-stream/tsconfig.json create mode 100644 packages/pg/bench.js create mode 100644 packages/pg/lib/connection-fast.js diff --git a/.gitignore b/.gitignore index 88b672dd3..56eba3953 100644 --- a/.gitignore +++ b/.gitignore @@ -5,3 +5,4 @@ build/ node_modules/ package-lock.json *.swp +dist diff --git a/packages/pg-packet-stream/package.json b/packages/pg-packet-stream/package.json new file mode 100644 index 000000000..89027056b --- /dev/null +++ b/packages/pg-packet-stream/package.json @@ -0,0 +1,22 @@ +{ + "name": "pg-packet-stream", + "version": "1.0.0", + "main": "dist/index.js", + "license": "MIT", + "devDependencies": { + "@types/node": "^12.12.21", + "chunky": "^0.0.0", + "mocha": "^6.2.2", + "typescript": "^3.7.3" + }, + "scripts": { + "test": "mocha -r ts-node/register src/**/*.test.ts" + }, + "dependencies": { + "@types/chai": "^4.2.7", + "@types/mocha": "^5.2.7", + "chai": "^4.2.0", + "mocha": "^6.2.2", + 
"ts-node": "^8.5.4" + } +} diff --git a/packages/pg-packet-stream/src/index.test.ts b/packages/pg-packet-stream/src/index.test.ts new file mode 100644 index 000000000..f5be4e2a0 --- /dev/null +++ b/packages/pg-packet-stream/src/index.test.ts @@ -0,0 +1,103 @@ +import 'mocha'; +import { PgPacketStream, Packet } from './' +import { expect } from 'chai' +import chunky from 'chunky' + +const consume = async (stream: PgPacketStream, count: number): Promise<Packet[]> => { + const result: Packet[] = []; + + return new Promise((resolve) => { + const read = () => { + stream.once('readable', () => { + let packet; + while (packet = stream.read()) { + result.push(packet) + } + if (result.length === count) { + resolve(result); + } else { + read() + } + + }) + } + read() + }) +} + +const emptyMessage = Buffer.from([0x0a, 0x00, 0x00, 0x00, 0x04]) +const oneByteMessage = Buffer.from([0x0b, 0x00, 0x00, 0x00, 0x05, 0x0a]) +const bigMessage = Buffer.from([0x0f, 0x00, 0x00, 0x00, 0x14, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f]) + +describe('PgPacketStream', () => { + it('should chunk a perfect input packet', async () => { + const stream = new PgPacketStream() + stream.write(Buffer.from([0x01, 0x00, 0x00, 0x00, 0x04])) + stream.end() + const buffers = await consume(stream, 1) + expect(buffers).to.have.length(1) + expect(buffers[0].packet).to.deep.equal(Buffer.from([0x1, 0x00, 0x00, 0x00, 0x04])) + }); + + it('should read 2 chunks into perfect input packet', async () => { + const stream = new PgPacketStream() + stream.write(Buffer.from([0x01, 0x00, 0x00, 0x00, 0x08])) + stream.write(Buffer.from([0x1, 0x2, 0x3, 0x4])) + stream.end() + const buffers = await consume(stream, 1) + expect(buffers).to.have.length(1) + expect(buffers[0].packet).to.deep.equal(Buffer.from([0x1, 0x00, 0x00, 0x00, 0x08, 0x1, 0x2, 0x3, 0x4])) + }); + + it('should read a bunch of big messages', async () => { + const stream = new PgPacketStream(); + let totalBuffer = 
Buffer.allocUnsafe(0); + const num = 2; + for (let i = 0; i < 2; i++) { + totalBuffer = Buffer.concat([totalBuffer, bigMessage, bigMessage]) + } + const chunks = chunky(totalBuffer) + for (const chunk of chunks) { + stream.write(chunk) + } + stream.end() + const messages = await consume(stream, num * 2) + expect(messages.map(x => x.code)).to.eql(new Array(num * 2).fill(0x0f)) + }) + + it('should read multiple messages in a single chunk', async () => { + const stream = new PgPacketStream() + stream.write(Buffer.from([0x01, 0x00, 0x00, 0x00, 0x04, 0x01, 0x00, 0x00, 0x00, 0x04])) + stream.end() + const buffers = await consume(stream, 2) + expect(buffers).to.have.length(2) + expect(buffers[0].packet).to.deep.equal(Buffer.from([0x1, 0x00, 0x00, 0x00, 0x04])) + expect(buffers[1].packet).to.deep.equal(Buffer.from([0x1, 0x00, 0x00, 0x00, 0x04])) + }); + + it('should read multiple chunks into multiple packets', async () => { + const stream = new PgPacketStream() + stream.write(Buffer.from([0x01, 0x00, 0x00, 0x00, 0x05, 0x0a, 0x01, 0x00, 0x00, 0x00, 0x05, 0x0b])) + stream.write(Buffer.from([0x01, 0x00, 0x00])); + stream.write(Buffer.from([0x00, 0x06, 0x0c, 0x0d, 0x03, 0x00, 0x00, 0x00, 0x04])) + stream.end() + const buffers = await consume(stream, 4) + expect(buffers).to.have.length(4) + expect(buffers[0].packet).to.deep.equal(Buffer.from([0x1, 0x00, 0x00, 0x00, 0x05, 0x0a])) + expect(buffers[1].packet).to.deep.equal(Buffer.from([0x1, 0x00, 0x00, 0x00, 0x05, 0x0b])) + expect(buffers[2].packet).to.deep.equal(Buffer.from([0x1, 0x00, 0x00, 0x00, 0x06, 0x0c, 0x0d])) + expect(buffers[3].packet).to.deep.equal(Buffer.from([0x3, 0x00, 0x00, 0x00, 0x04])) + }); + + it('reads packet that spans multiple chunks', async () => { + const stream = new PgPacketStream() + stream.write(Buffer.from([0x0d, 0x00, 0x00, 0x00])) + stream.write(Buffer.from([0x09])) // length + stream.write(Buffer.from([0x0a, 0x0b, 0x0c, 0x0d])) + stream.write(Buffer.from([0x0a, 0x0b, 0x0c, 0x0d])) + 
stream.write(Buffer.from([0x0a, 0x0b, 0x0c, 0x0d])) + stream.end() + const buffers = await consume(stream, 1) + expect(buffers).to.have.length(1) + }) +}); diff --git a/packages/pg-packet-stream/src/index.ts b/packages/pg-packet-stream/src/index.ts new file mode 100644 index 000000000..95f0e82c6 --- /dev/null +++ b/packages/pg-packet-stream/src/index.ts @@ -0,0 +1,177 @@ +import { Transform, TransformCallback, TransformOptions } from 'stream'; +import assert from 'assert' + +export const hello = () => 'Hello world!' + +// this is a single byte +const CODE_LENGTH = 1; +// this is a Uint32 +const LEN_LENGTH = 4; + +export type Packet = { + code: number; + packet: Buffer; +} + +type FieldFormat = "text" | "binary" + +class Field { + constructor(public name: string) { + + } + +} + +const emptyBuffer = Buffer.allocUnsafe(0); + +class BufferReader { + private buffer: Buffer = emptyBuffer; + constructor(private offset: number = 0) { + + } + + public setBuffer(offset: number, buffer: Buffer): void { + this.offset = offset; + this.buffer = buffer; + } + + public int16() { + const result = this.buffer.readInt16BE(this.offset); + this.offset += 2; + return result; + } + + public int32() { + const result = this.buffer.readInt32BE(this.offset); + this.offset += 4; + return result; + } + + public string(length: number): string { + // TODO(bmc): support non-utf8 encoding + const result = this.buffer.toString('utf8', this.offset, this.offset + length) + this.offset += length; + return result; + } + + public bytes(length: number): Buffer { + const result = this.buffer.slice(this.offset, this.offset + length); + this.offset += length; + return result + } +} + +type Mode = 'text' | 'binary'; + +type StreamOptions = TransformOptions & { + mode: Mode +} + +const parseComplete = { + name: 'parseComplete', + length: 5, +}; + +const bindComplete = { + name: 'bindComplete', + length: 5, +} + +const closeComplete = { + name: 'closeComplete', + length: 5, +} + +export class PgPacketStream 
extends Transform { + private remainingBuffer: Buffer = emptyBuffer; + private reader = new BufferReader(); + private mode: Mode; + + constructor(opts: StreamOptions) { + super({ + ...opts, + readableObjectMode: true + }) + if (opts.mode === 'binary') { + throw new Error('Binary mode not supported yet') + } + this.mode = opts.mode; + } + + public _transform(buffer: Buffer, encoding: string, callback: TransformCallback) { + const combinedBuffer = this.remainingBuffer.byteLength ? Buffer.concat([this.remainingBuffer, buffer], this.remainingBuffer.length + buffer.length) : buffer; + let offset = 0; + while ((offset + CODE_LENGTH + LEN_LENGTH) <= combinedBuffer.byteLength) { + // code is 1 byte long - it identifies the message type + const code = combinedBuffer[offset]; + + // length is 1 Uint32BE - it is the length of the message EXCLUDING the code + const length = combinedBuffer.readUInt32BE(offset + CODE_LENGTH); + + const fullMessageLength = CODE_LENGTH + length; + + if (fullMessageLength + offset <= combinedBuffer.byteLength) { + this.handlePacket(offset, code, length, combinedBuffer); + offset += fullMessageLength; + } else { + break; + } + } + + if (offset === combinedBuffer.byteLength) { + this.remainingBuffer = emptyBuffer; + } else { + this.remainingBuffer = combinedBuffer.slice(offset) + } + + callback(null); + } + + private handlePacket(offset: number, code: number, length: number, combinedBuffer: Buffer) { + switch (code) { + case 0x44: // D + this.parseDataRowMessage(offset, length, combinedBuffer); + break; + case 0x32: // 2 + this.emit('message', bindComplete); + break; + case 0x31: // 1 + this.emit('message', parseComplete); + break; + case 0x33: // 3 + this.emit('message', closeComplete); + break; + default: + const packet = combinedBuffer.slice(offset, CODE_LENGTH + length + offset) + this.push({ code, length, packet, buffer: packet.slice(5) }) + } + } + + public _flush(callback: TransformCallback) { + } + + private parseDataRowMessage(offset: 
number, length: number, bytes: Buffer) { + this.reader.setBuffer(offset + 5, bytes); + const fieldCount = this.reader.int16(); + const fields: any[] = new Array(fieldCount); + for (let i = 0; i < fieldCount; i++) { + const len = this.reader.int32(); + if (len === -1) { + fields[i] = null + } else if (this.mode === 'text') { + fields[i] = this.reader.string(len) + } + } + const message = new DataRowMessage(length, fields); + this.emit('message', message); + } +} + + +class DataRowMessage { + public readonly fieldCount: number; + public readonly name: string = 'dataRow' + constructor(public length: number, public fields: any[]) { + this.fieldCount = fields.length; + } +} diff --git a/packages/pg-packet-stream/src/types/chunky.d.ts b/packages/pg-packet-stream/src/types/chunky.d.ts new file mode 100644 index 000000000..7389bda66 --- /dev/null +++ b/packages/pg-packet-stream/src/types/chunky.d.ts @@ -0,0 +1 @@ +declare module 'chunky' diff --git a/packages/pg-packet-stream/tsconfig.json b/packages/pg-packet-stream/tsconfig.json new file mode 100644 index 000000000..f6661febd --- /dev/null +++ b/packages/pg-packet-stream/tsconfig.json @@ -0,0 +1,23 @@ +{ + "compilerOptions": { + "module": "commonjs", + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "strict": true, + "target": "es6", + "noImplicitAny": true, + "moduleResolution": "node", + "sourceMap": true, + "outDir": "dist", + "baseUrl": ".", + "paths": { + "*": [ + "node_modules/*", + "src/types/*" + ] + } + }, + "include": [ + "src/**/*" + ] +} diff --git a/packages/pg/bench.js b/packages/pg/bench.js new file mode 100644 index 000000000..b650177e2 --- /dev/null +++ b/packages/pg/bench.js @@ -0,0 +1,42 @@ +const pg = require("./lib"); +const pool = new pg.Pool() + +const q = { + text: + "select typname, typnamespace, typowner, typlen, typbyval, typcategory, typispreferred, typisdefined, typdelim, typrelid, typelem, typarray from pg_type where typtypmod = $1 and typisdefined = $2", + values: [-1, 
true] +}; + +const exec = async client => { + const result = await client.query({ + text: q.text, + values: q.values, + rowMode: "array" + }); +}; + +const bench = async (client, time) => { + let start = Date.now(); + let count = 0; + while (true) { + await exec(client); + count++; + if (Date.now() - start > time) { + return count; + } + } +}; + +const run = async () => { + const client = new pg.Client(); + await client.connect(); + await bench(client, 1000); + console.log("warmup done"); + const seconds = 5; + const queries = await bench(client, seconds * 1000); + console.log("queries:", queries); + console.log("qps", queries / seconds); + await client.end(); +}; + +run().catch(e => console.error(e) || process.exit(-1)); diff --git a/packages/pg/lib/client.js b/packages/pg/lib/client.js index 370630a02..077a9f676 100644 --- a/packages/pg/lib/client.js +++ b/packages/pg/lib/client.js @@ -18,6 +18,9 @@ var ConnectionParameters = require('./connection-parameters') var Query = require('./query') var defaults = require('./defaults') var Connection = require('./connection') +if (process.env.PG_FAST_CONNECTION) { + Connection = require('./connection-fast') +} var Client = function (config) { EventEmitter.call(this) @@ -112,7 +115,7 @@ Client.prototype._connect = function (callback) { con.startup(self.getStartupConf()) }) - function checkPgPass (cb) { + function checkPgPass(cb) { return function (msg) { if (typeof self.password === 'function') { self._Promise.resolve() @@ -492,7 +495,7 @@ Client.prototype.query = function (config, values, callback) { // we already returned an error, // just do nothing if query completes - query.callback = () => {} + query.callback = () => { } // Remove from queue var index = this.queryQueue.indexOf(query) diff --git a/packages/pg/lib/connection-fast.js b/packages/pg/lib/connection-fast.js new file mode 100644 index 000000000..5ad38f166 --- /dev/null +++ b/packages/pg/lib/connection-fast.js @@ -0,0 +1,709 @@ +'use strict' +/** + * 
Copyright (c) 2010-2017 Brian Carlson (brian.m.carlson@gmail.com) + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * README.md file in the root directory of this source tree. + */ + +var net = require('net') +var EventEmitter = require('events').EventEmitter +var util = require('util') + +var Writer = require('buffer-writer') +var Reader = require('packet-reader') +var PacketStream = require('pg-packet-stream') + +console.log(PacketStream.hello()) + +var TEXT_MODE = 0 +var BINARY_MODE = 1 +console.log('using faster connection') +var Connection = function (config) { + EventEmitter.call(this) + config = config || {} + this.stream = config.stream || new net.Socket() + this.stream.setNoDelay(true) + this._keepAlive = config.keepAlive + this._keepAliveInitialDelayMillis = config.keepAliveInitialDelayMillis + this.lastBuffer = false + this.lastOffset = 0 + this.buffer = null + this.offset = null + this.encoding = config.encoding || 'utf8' + this.parsedStatements = {} + this.writer = new Writer() + this.ssl = config.ssl || false + this._ending = false + this._mode = TEXT_MODE + this._emitMessage = false + this._reader = new Reader({ + headerSize: 1, + lengthPadding: -4 + }) + var self = this + this.on('newListener', function (eventName) { + if (eventName === 'message') { + self._emitMessage = true + } + }) +} + +util.inherits(Connection, EventEmitter) + +Connection.prototype.connect = function (port, host) { + var self = this + + if (this.stream.readyState === 'closed') { + this.stream.connect(port, host) + } else if (this.stream.readyState === 'open') { + this.emit('connect') + } + + this.stream.on('connect', function () { + if (self._keepAlive) { + self.stream.setKeepAlive(true, self._keepAliveInitialDelayMillis) + } + self.emit('connect') + }) + + const reportStreamError = function (error) { + // errors about disconnections should be ignored during disconnect + if (self._ending && (error.code === 'ECONNRESET' || 
error.code === 'EPIPE')) { + return + } + self.emit('error', error) + } + this.stream.on('error', reportStreamError) + + this.stream.on('close', function () { + self.emit('end') + }) + + if (!this.ssl) { + return this.attachListeners(this.stream) + } + + this.stream.once('data', function (buffer) { + var responseCode = buffer.toString('utf8') + switch (responseCode) { + case 'N': // Server does not support SSL connections + return self.emit('error', new Error('The server does not support SSL connections')) + case 'S': // Server supports SSL connections, continue with a secure connection + break + default: + // Any other response byte, including 'E' (ErrorResponse) indicating a server error + return self.emit('error', new Error('There was an error establishing an SSL connection')) + } + var tls = require('tls') + const options = { + socket: self.stream, + checkServerIdentity: self.ssl.checkServerIdentity || tls.checkServerIdentity, + rejectUnauthorized: self.ssl.rejectUnauthorized, + ca: self.ssl.ca, + pfx: self.ssl.pfx, + key: self.ssl.key, + passphrase: self.ssl.passphrase, + cert: self.ssl.cert, + secureOptions: self.ssl.secureOptions, + NPNProtocols: self.ssl.NPNProtocols + } + if (net.isIP(host) === 0) { + options.servername = host + } + self.stream = tls.connect(options) + self.attachListeners(self.stream) + self.stream.on('error', reportStreamError) + + self.emit('sslconnect') + }) +} + +Connection.prototype.attachListeners = function (stream) { + var self = this + const mode = this._mode === TEXT_MODE ? 'text' : 'binary'; + const packetStream = new PacketStream.PgPacketStream({ mode }) + packetStream.on('message', (msg) => { + self.emit(msg.name, msg) + }) + stream.pipe(packetStream).on('data', (packet) => { + // console.log('buff', packet) + var msg = self.parseMessage(packet) + var eventName = msg.name === 'error' ? 
'errorMessage' : msg.name + if (self._emitMessage) { + self.emit('message', msg) + } + self.emit(eventName, msg) + }) + stream.on('end', function () { + self.emit('end') + }) +} + +Connection.prototype.requestSsl = function () { + var bodyBuffer = this.writer + .addInt16(0x04d2) + .addInt16(0x162f) + .flush() + + var length = bodyBuffer.length + 4 + + var buffer = new Writer() + .addInt32(length) + .add(bodyBuffer) + .join() + this.stream.write(buffer) +} + +Connection.prototype.startup = function (config) { + var writer = this.writer.addInt16(3).addInt16(0) + + Object.keys(config).forEach(function (key) { + var val = config[key] + writer.addCString(key).addCString(val) + }) + + writer.addCString('client_encoding').addCString("'utf-8'") + + var bodyBuffer = writer.addCString('').flush() + // this message is sent without a code + + var length = bodyBuffer.length + 4 + + var buffer = new Writer() + .addInt32(length) + .add(bodyBuffer) + .join() + this.stream.write(buffer) +} + +Connection.prototype.cancel = function (processID, secretKey) { + var bodyBuffer = this.writer + .addInt16(1234) + .addInt16(5678) + .addInt32(processID) + .addInt32(secretKey) + .flush() + + var length = bodyBuffer.length + 4 + + var buffer = new Writer() + .addInt32(length) + .add(bodyBuffer) + .join() + this.stream.write(buffer) +} + +Connection.prototype.password = function (password) { + // 0x70 = 'p' + this._send(0x70, this.writer.addCString(password)) +} + +Connection.prototype.sendSASLInitialResponseMessage = function (mechanism, initialResponse) { + // 0x70 = 'p' + this.writer + .addCString(mechanism) + .addInt32(Buffer.byteLength(initialResponse)) + .addString(initialResponse) + + this._send(0x70) +} + +Connection.prototype.sendSCRAMClientFinalMessage = function (additionalData) { + // 0x70 = 'p' + this.writer.addString(additionalData) + + this._send(0x70) +} + +Connection.prototype._send = function (code, more) { + if (!this.stream.writable) { + return false + } + return 
this.stream.write(this.writer.flush(code)) +} + +Connection.prototype.query = function (text) { + // 0x51 = Q + this.stream.write(this.writer.addCString(text).flush(0x51)) +} + +// send parse message +Connection.prototype.parse = function (query) { + // expect something like this: + // { name: 'queryName', + // text: 'select * from blah', + // types: ['int8', 'bool'] } + + // normalize missing query names to allow for null + query.name = query.name || '' + if (query.name.length > 63) { + /* eslint-disable no-console */ + console.error('Warning! Postgres only supports 63 characters for query names.') + console.error('You supplied %s (%s)', query.name, query.name.length) + console.error('This can cause conflicts and silent errors executing queries') + /* eslint-enable no-console */ + } + // normalize null type array + query.types = query.types || [] + var len = query.types.length + var buffer = this.writer + .addCString(query.name) // name of query + .addCString(query.text) // actual query text + .addInt16(len) + for (var i = 0; i < len; i++) { + buffer.addInt32(query.types[i]) + } + + var code = 0x50 + this._send(code) + this.flush() +} + +// send bind message +// "more" === true to buffer the message until flush() is called +Connection.prototype.bind = function (config) { + // normalize config + config = config || {} + config.portal = config.portal || '' + config.statement = config.statement || '' + config.binary = config.binary || false + var values = config.values || [] + var len = values.length + var useBinary = false + for (var j = 0; j < len; j++) { + useBinary |= values[j] instanceof Buffer + } + var buffer = this.writer.addCString(config.portal).addCString(config.statement) + if (!useBinary) { + buffer.addInt16(0) + } else { + buffer.addInt16(len) + for (j = 0; j < len; j++) { + buffer.addInt16(values[j] instanceof Buffer) + } + } + buffer.addInt16(len) + for (var i = 0; i < len; i++) { + var val = values[i] + if (val === null || typeof val === 'undefined') 
{ + buffer.addInt32(-1) + } else if (val instanceof Buffer) { + buffer.addInt32(val.length) + buffer.add(val) + } else { + buffer.addInt32(Buffer.byteLength(val)) + buffer.addString(val) + } + } + + if (config.binary) { + buffer.addInt16(1) // format codes to use binary + buffer.addInt16(1) + } else { + buffer.addInt16(0) // format codes to use text + } + // 0x42 = 'B' + this._send(0x42) + this.flush() +} + +// send execute message +// "more" === true to buffer the message until flush() is called +Connection.prototype.execute = function (config) { + config = config || {} + config.portal = config.portal || '' + config.rows = config.rows || '' + this.writer.addCString(config.portal).addInt32(config.rows) + + // 0x45 = 'E' + this._send(0x45) + this.flush() +} + +var emptyBuffer = Buffer.alloc(0) + +const flushBuffer = Buffer.from([0x48, 0x00, 0x00, 0x00, 0x04]) +Connection.prototype.flush = function () { + if (this.stream.writable) { + this.stream.write(flushBuffer) + } +} + +const syncBuffer = Buffer.from([0x53, 0x00, 0x00, 0x00, 0x04]) +Connection.prototype.sync = function () { + this._ending = true + // clear out any pending data in the writer + this.writer.clear() + if (this.stream.writable) { + this.stream.write(syncBuffer) + this.stream.write(flushBuffer) + } +} + +const END_BUFFER = Buffer.from([0x58, 0x00, 0x00, 0x00, 0x04]) + +Connection.prototype.end = function () { + // 0x58 = 'X' + this.writer.clear() + this._ending = true + return this.stream.write(END_BUFFER, () => { + this.stream.end() + }) +} + +Connection.prototype.close = function (msg) { + this.writer.addCString(msg.type + (msg.name || '')) + this._send(0x43) +} + +Connection.prototype.describe = function (msg) { + this.writer.addCString(msg.type + (msg.name || '')) + this._send(0x44) + this.flush() +} + +Connection.prototype.sendCopyFromChunk = function (chunk) { + this.stream.write(this.writer.add(chunk).flush(0x64)) +} + +Connection.prototype.endCopyFrom = function () { + 
this.stream.write(this.writer.add(emptyBuffer).flush(0x63)) +} + +Connection.prototype.sendCopyFail = function (msg) { + // this.stream.write(this.writer.add(emptyBuffer).flush(0x66)); + this.writer.addCString(msg) + this._send(0x66) +} + +var Message = function (name, length) { + this.name = name + this.length = length +} + +Connection.prototype.parseMessage = function (packet) { + this.offset = 0 + const { code, length, buffer } = packet + switch (code) { + case 0x52: // R + return this.parseR(buffer, length) + + case 0x53: // S + return this.parseS(buffer, length) + + case 0x4b: // K + return this.parseK(buffer, length) + + case 0x43: // C + return this.parseC(buffer, length) + + case 0x5a: // Z + return this.parseZ(buffer, length) + + case 0x54: // T + return this.parseT(buffer, length) + + case 0x44: // D + return this.parseD(buffer, length) + + case 0x45: // E + return this.parseE(buffer, length) + + case 0x4e: // N + return this.parseN(buffer, length) + + case 0x31: // 1 + return new Message('parseComplete', length) + + case 0x32: // 2 + return new Message('bindComplete', length) + + case 0x33: // 3 + return new Message('closeComplete', length) + + case 0x41: // A + return this.parseA(buffer, length) + + case 0x6e: // n + return new Message('noData', length) + + case 0x49: // I + return new Message('emptyQuery', length) + + case 0x73: // s + return new Message('portalSuspended', length) + + case 0x47: // G + return this.parseG(buffer, length) + + case 0x48: // H + return this.parseH(buffer, length) + + case 0x57: // W + return new Message('replicationStart', length) + + case 0x63: // c + return new Message('copyDone', length) + + case 0x64: // d + return this.parsed(buffer, length) + } + console.log('could not parse', packet) +} + +Connection.prototype.parseR = function (buffer, length) { + var code = this.parseInt32(buffer) + + var msg = new Message('authenticationOk', length) + + switch (code) { + case 0: // AuthenticationOk + return msg + case 3: // 
AuthenticationCleartextPassword + if (msg.length === 8) { + msg.name = 'authenticationCleartextPassword' + return msg + } + break + case 5: // AuthenticationMD5Password + if (msg.length === 12) { + msg.name = 'authenticationMD5Password' + msg.salt = Buffer.alloc(4) + buffer.copy(msg.salt, 0, this.offset, this.offset + 4) + this.offset += 4 + return msg + } + + break + case 10: // AuthenticationSASL + msg.name = 'authenticationSASL' + msg.mechanisms = [] + do { + var mechanism = this.parseCString(buffer) + + if (mechanism) { + msg.mechanisms.push(mechanism) + } + } while (mechanism) + + return msg + case 11: // AuthenticationSASLContinue + msg.name = 'authenticationSASLContinue' + msg.data = this.readString(buffer, length - 4) + + return msg + case 12: // AuthenticationSASLFinal + msg.name = 'authenticationSASLFinal' + msg.data = this.readString(buffer, length - 4) + + return msg + } + + throw new Error('Unknown authenticationOk message type' + util.inspect(msg)) +} + +Connection.prototype.parseS = function (buffer, length) { + var msg = new Message('parameterStatus', length) + msg.parameterName = this.parseCString(buffer) + msg.parameterValue = this.parseCString(buffer) + return msg +} + +Connection.prototype.parseK = function (buffer, length) { + var msg = new Message('backendKeyData', length) + msg.processID = this.parseInt32(buffer) + msg.secretKey = this.parseInt32(buffer) + return msg +} + +Connection.prototype.parseC = function (buffer, length) { + var msg = new Message('commandComplete', length) + msg.text = this.parseCString(buffer) + return msg +} + +Connection.prototype.parseZ = function (buffer, length) { + var msg = new Message('readyForQuery', length) + msg.name = 'readyForQuery' + msg.status = this.readString(buffer, 1) + return msg +} + +var ROW_DESCRIPTION = 'rowDescription' +Connection.prototype.parseT = function (buffer, length) { + var msg = new Message(ROW_DESCRIPTION, length) + msg.fieldCount = this.parseInt16(buffer) + var fields = [] + for 
(var i = 0; i < msg.fieldCount; i++) { + fields.push(this.parseField(buffer)) + } + msg.fields = fields + return msg +} + +var Field = function () { + this.name = null + this.tableID = null + this.columnID = null + this.dataTypeID = null + this.dataTypeSize = null + this.dataTypeModifier = null + this.format = null +} + +var FORMAT_TEXT = 'text' +var FORMAT_BINARY = 'binary' +Connection.prototype.parseField = function (buffer) { + var field = new Field() + field.name = this.parseCString(buffer) + field.tableID = this.parseInt32(buffer) + field.columnID = this.parseInt16(buffer) + field.dataTypeID = this.parseInt32(buffer) + field.dataTypeSize = this.parseInt16(buffer) + field.dataTypeModifier = this.parseInt32(buffer) + if (this.parseInt16(buffer) === TEXT_MODE) { + this._mode = TEXT_MODE + field.format = FORMAT_TEXT + } else { + this._mode = BINARY_MODE + field.format = FORMAT_BINARY + } + return field +} + +var DATA_ROW = 'dataRow' +var DataRowMessage = function (length, fieldCount) { + this.name = DATA_ROW + this.length = length + this.fieldCount = fieldCount + this.fields = [] +} + +// extremely hot-path code +Connection.prototype.parseD = function (buffer, length) { + var fieldCount = this.parseInt16(buffer) + var msg = new DataRowMessage(length, fieldCount) + for (var i = 0; i < fieldCount; i++) { + msg.fields.push(this._readValue(buffer)) + } + return msg +} + +// extremely hot-path code +Connection.prototype._readValue = function (buffer) { + var length = this.parseInt32(buffer) + if (length === -1) return null + if (this._mode === TEXT_MODE) { + return this.readString(buffer, length) + } + return this.readBytes(buffer, length) +} + +// parses error +Connection.prototype.parseE = function (buffer, length) { + var fields = {} + var fieldType = this.readString(buffer, 1) + while (fieldType !== '\0') { + fields[fieldType] = this.parseCString(buffer) + fieldType = this.readString(buffer, 1) + } + + // the msg is an Error instance + var msg = new Error(fields.M) 
+ + // for compatibility with Message + msg.name = 'error' + msg.length = length + + msg.severity = fields.S + msg.code = fields.C + msg.detail = fields.D + msg.hint = fields.H + msg.position = fields.P + msg.internalPosition = fields.p + msg.internalQuery = fields.q + msg.where = fields.W + msg.schema = fields.s + msg.table = fields.t + msg.column = fields.c + msg.dataType = fields.d + msg.constraint = fields.n + msg.file = fields.F + msg.line = fields.L + msg.routine = fields.R + return msg +} + +// same thing, different name +Connection.prototype.parseN = function (buffer, length) { + var msg = this.parseE(buffer, length) + msg.name = 'notice' + return msg +} + +Connection.prototype.parseA = function (buffer, length) { + var msg = new Message('notification', length) + msg.processId = this.parseInt32(buffer) + msg.channel = this.parseCString(buffer) + msg.payload = this.parseCString(buffer) + return msg +} + +Connection.prototype.parseG = function (buffer, length) { + var msg = new Message('copyInResponse', length) + return this.parseGH(buffer, msg) +} + +Connection.prototype.parseH = function (buffer, length) { + var msg = new Message('copyOutResponse', length) + return this.parseGH(buffer, msg) +} + +Connection.prototype.parseGH = function (buffer, msg) { + var isBinary = buffer[this.offset] !== 0 + this.offset++ + msg.binary = isBinary + var columnCount = this.parseInt16(buffer) + msg.columnTypes = [] + for (var i = 0; i < columnCount; i++) { + msg.columnTypes.push(this.parseInt16(buffer)) + } + return msg +} + +Connection.prototype.parsed = function (buffer, length) { + var msg = new Message('copyData', length) + msg.chunk = this.readBytes(buffer, msg.length - 4) + return msg +} + +Connection.prototype.parseInt32 = function (buffer) { + var value = buffer.readInt32BE(this.offset) + this.offset += 4 + return value +} + +Connection.prototype.parseInt16 = function (buffer) { + var value = buffer.readInt16BE(this.offset) + this.offset += 2 + return value +} + 
+Connection.prototype.readString = function (buffer, length) { + return buffer.toString(this.encoding, this.offset, (this.offset += length)) +} + +Connection.prototype.readBytes = function (buffer, length) { + return buffer.slice(this.offset, (this.offset += length)) +} + +Connection.prototype.parseCString = function (buffer) { + var start = this.offset + var end = buffer.indexOf(0, start) + this.offset = end + 1 + return buffer.toString(this.encoding, start, end) +} +// end parsing methods +module.exports = Connection diff --git a/packages/pg/lib/result.js b/packages/pg/lib/result.js index 088298bc4..7e59a413e 100644 --- a/packages/pg/lib/result.js +++ b/packages/pg/lib/result.js @@ -16,9 +16,9 @@ var Result = function (rowMode, types) { this.command = null this.rowCount = null this.oid = null - this.rows = [] - this.fields = [] - this._parsers = [] + this.rows = []; + this.fields = undefined; + this._parsers = undefined; this._types = types this.RowCtor = null this.rowAsArray = rowMode === 'array' @@ -53,13 +53,13 @@ Result.prototype.addCommandComplete = function (msg) { } Result.prototype._parseRowAsArray = function (rowData) { - var row = [] + var row = new Array(rowData.length) for (var i = 0, len = rowData.length; i < len; i++) { var rawValue = rowData[i] if (rawValue !== null) { - row.push(this._parsers[i](rawValue)) + row[i] = this._parsers[i](rawValue) } else { - row.push(null) + row[i] = null } } return row @@ -88,15 +88,17 @@ Result.prototype.addFields = function (fieldDescriptions) { // multiple query statements in 1 action can result in multiple sets // of rowDescriptions...eg: 'select NOW(); select 1::int;' // you need to reset the fields + this.fields = fieldDescriptions; if (this.fields.length) { - this.fields = [] - this._parsers = [] + this._parsers = new Array(fieldDescriptions.length); } for (var i = 0; i < fieldDescriptions.length; i++) { var desc = fieldDescriptions[i] - this.fields.push(desc) - var parser = (this._types || 
types).getTypeParser(desc.dataTypeID, desc.format || 'text') - this._parsers.push(parser) + if (this._types) { + this._parsers[i] = this._types.getTypeParser(desc.dataTypeID, desc.format || 'text'); + } else { + this._parsers[i] = types.getTypeParser(desc.dataTypeID, desc.format || 'text') + } } } diff --git a/packages/pg/package.json b/packages/pg/package.json index 951e42df6..e254afba8 100644 --- a/packages/pg/package.json +++ b/packages/pg/package.json @@ -22,6 +22,7 @@ "buffer-writer": "2.0.0", "packet-reader": "1.0.0", "pg-connection-string": "0.1.3", + "pg-packet-stream": "^1.0.0", "pg-pool": "^2.0.7", "pg-types": "^2.1.0", "pgpass": "1.x", diff --git a/packages/pg/test/integration/client/api-tests.js b/packages/pg/test/integration/client/api-tests.js index c274bbd36..dab923505 100644 --- a/packages/pg/test/integration/client/api-tests.js +++ b/packages/pg/test/integration/client/api-tests.js @@ -4,6 +4,38 @@ var pg = helper.pg var suite = new helper.Suite() +suite.test('null and undefined are both inserted as NULL', function (done) { + const pool = new pg.Pool() + pool.connect( + assert.calls(function (err, client, release) { + assert(!err) + client.query( + 'CREATE TEMP TABLE my_nulls(a varchar(1), b varchar(1), c integer, d integer, e date, f date)' + ) + client.query( + 'INSERT INTO my_nulls(a,b,c,d,e,f) VALUES ($1,$2,$3,$4,$5,$6)', + [null, undefined, null, undefined, null, undefined] + ) + client.query( + 'SELECT * FROM my_nulls', + assert.calls(function (err, result) { + console.log(err) + assert.ifError(err) + assert.equal(result.rows.length, 1) + assert.isNull(result.rows[0].a) + assert.isNull(result.rows[0].b) + assert.isNull(result.rows[0].c) + assert.isNull(result.rows[0].d) + assert.isNull(result.rows[0].e) + assert.isNull(result.rows[0].f) + pool.end(done) + release() + }) + ) + }) + ) +}) + suite.test('pool callback behavior', done => { // test weird callback behavior with node-pool const pool = new pg.Pool() @@ -16,7 +48,7 @@ suite.test('pool 
callback behavior', done => { }) suite.test('query timeout', (cb) => { - const pool = new pg.Pool({query_timeout: 1000}) + const pool = new pg.Pool({ query_timeout: 1000 }) pool.connect().then((client) => { client.query('SELECT pg_sleep(2)', assert.calls(function (err, result) { assert(err) @@ -28,7 +60,7 @@ suite.test('query timeout', (cb) => { }) suite.test('query recover from timeout', (cb) => { - const pool = new pg.Pool({query_timeout: 1000}) + const pool = new pg.Pool({ query_timeout: 1000 }) pool.connect().then((client) => { client.query('SELECT pg_sleep(20)', assert.calls(function (err, result) { assert(err) @@ -46,7 +78,7 @@ suite.test('query recover from timeout', (cb) => { }) suite.test('query no timeout', (cb) => { - const pool = new pg.Pool({query_timeout: 10000}) + const pool = new pg.Pool({ query_timeout: 10000 }) pool.connect().then((client) => { client.query('SELECT pg_sleep(1)', assert.calls(function (err, result) { assert(!err) @@ -131,18 +163,18 @@ suite.test('raises error if cannot connect', function () { suite.test('query errors are handled and do not bubble if callback is provided', function (done) { const pool = new pg.Pool() pool.connect( - assert.calls(function (err, client, release) { - assert(!err) - client.query( - 'SELECT OISDJF FROM LEIWLISEJLSE', - assert.calls(function (err, result) { - assert.ok(err) - release() - pool.end(done) - }) - ) - }) - ) + assert.calls(function (err, client, release) { + assert(!err) + client.query( + 'SELECT OISDJF FROM LEIWLISEJLSE', + assert.calls(function (err, result) { + assert.ok(err) + release() + pool.end(done) + }) + ) + }) + ) } ) @@ -216,34 +248,3 @@ suite.test('can provide callback and config and parameters', function (done) { }) ) }) - -suite.test('null and undefined are both inserted as NULL', function (done) { - const pool = new pg.Pool() - pool.connect( - assert.calls(function (err, client, release) { - assert(!err) - client.query( - 'CREATE TEMP TABLE my_nulls(a varchar(1), b varchar(1), 
c integer, d integer, e date, f date)' - ) - client.query( - 'INSERT INTO my_nulls(a,b,c,d,e,f) VALUES ($1,$2,$3,$4,$5,$6)', - [null, undefined, null, undefined, null, undefined] - ) - client.query( - 'SELECT * FROM my_nulls', - assert.calls(function (err, result) { - assert(!err) - assert.equal(result.rows.length, 1) - assert.isNull(result.rows[0].a) - assert.isNull(result.rows[0].b) - assert.isNull(result.rows[0].c) - assert.isNull(result.rows[0].d) - assert.isNull(result.rows[0].e) - assert.isNull(result.rows[0].f) - pool.end(done) - release() - }) - ) - }) - ) -}) diff --git a/yarn.lock b/yarn.lock index 110a8dc72..cd374263a 100644 --- a/yarn.lock +++ b/yarn.lock @@ -831,6 +831,11 @@ dependencies: "@types/node" ">= 8" +"@types/chai@^4.2.7": + version "4.2.7" + resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.2.7.tgz#1c8c25cbf6e59ffa7d6b9652c78e547d9a41692d" + integrity sha512-luq8meHGYwvky0O7u0eQZdA7B4Wd9owUCqvbw2m3XCrCU8mplYOujMBbvyS547AxJkC+pGnd0Cm15eNxEUNU8g== + "@types/events@*": version "3.0.0" resolved "https://registry.yarnpkg.com/@types/events/-/events-3.0.0.tgz#2862f3f58a9a7f7c3e78d79f130dd4d71c25c2a7" @@ -850,7 +855,12 @@ resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-3.0.3.tgz#3dca0e3f33b200fc7d1139c0cd96c1268cadfd9d" integrity sha512-tHq6qdbT9U1IRSGf14CL0pUlULksvY9OZ+5eEgl1N7t+OA3tGvNpxJCzuKQlsNgCVwbAs670L1vcVQi8j9HjnA== -"@types/node@*", "@types/node@>= 8": +"@types/mocha@^5.2.7": + version "5.2.7" + resolved "https://registry.yarnpkg.com/@types/mocha/-/mocha-5.2.7.tgz#315d570ccb56c53452ff8638738df60726d5b6ea" + integrity sha512-NYrtPht0wGzhwe9+/idPaBB+TqkY9AhTvOLMkThm0IoEfLaiVQZwBwyJ5puCkO3AUCWrmcoePjp2mbFocKy4SQ== + +"@types/node@*", "@types/node@>= 8", "@types/node@^12.12.21": version "12.12.21" resolved "https://registry.yarnpkg.com/@types/node/-/node-12.12.21.tgz#aa44a6363291c7037111c47e4661ad210aded23f" integrity sha512-8sRGhbpU+ck1n0PGAUgVrWrWdjSW2aqNeyC15W88GRsMpSwzv6RJGlLhE7s2RhVSOdyDmxbqlWSeThq4/7xqlA== @@ 
-985,6 +995,11 @@ are-we-there-yet@~1.1.2: delegates "^1.0.0" readable-stream "^2.0.6" +arg@^4.1.0: + version "4.1.2" + resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.2.tgz#e70c90579e02c63d80e3ad4e31d8bfdb8bd50064" + integrity sha512-+ytCkGcBtHZ3V2r2Z06AncYO8jz46UEamcspGoU8lHcEbpn6J77QK0vdWvChsclg/tM5XIJC5tnjmPp7Eq6Obg== + argparse@^1.0.7: version "1.0.10" resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" @@ -1077,6 +1092,11 @@ assert-plus@1.0.0, assert-plus@^1.0.0: resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" integrity sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU= +assertion-error@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/assertion-error/-/assertion-error-1.1.0.tgz#e60b6b0e8f301bd97e5375215bda406c85118c0b" + integrity sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw== + assign-symbols@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367" @@ -1318,6 +1338,18 @@ caseless@~0.12.0: resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" integrity sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw= +chai@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/chai/-/chai-4.2.0.tgz#760aa72cf20e3795e84b12877ce0e83737aa29e5" + integrity sha512-XQU3bhBukrOsQCuwZndwGcCVQHyZi53fQ6Ys1Fym7E4olpIqqZZhhoFJoaKVvV17lWQoXYwgWN2nF5crA8J2jw== + dependencies: + assertion-error "^1.1.0" + check-error "^1.0.2" + deep-eql "^3.0.1" + get-func-name "^2.0.0" + pathval "^1.1.0" + type-detect "^4.0.5" + chalk@^2.0.0, chalk@^2.0.1, chalk@^2.1.0, chalk@^2.3.1, chalk@^2.4.2: version "2.4.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" @@ -1332,11 +1364,21 @@ chardet@^0.7.0: resolved 
"https://registry.yarnpkg.com/chardet/-/chardet-0.7.0.tgz#90094849f0937f2eedc2425d0d28a9e5f0cbad9e" integrity sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA== +check-error@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/check-error/-/check-error-1.0.2.tgz#574d312edd88bb5dd8912e9286dd6c0aed4aac82" + integrity sha1-V00xLt2Iu13YkS6Sht1sCu1KrII= + chownr@^1.1.1, chownr@^1.1.2: version "1.1.3" resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.3.tgz#42d837d5239688d55f303003a508230fa6727142" integrity sha512-i70fVHhmV3DtTl6nqvZOnIjbY0Pe4kAUjwHj8z0zAdgBtYrJyYwLKCCuRBQ5ppkyL0AkN7HKRnETdmdp1zqNXw== +chunky@^0.0.0: + version "0.0.0" + resolved "https://registry.yarnpkg.com/chunky/-/chunky-0.0.0.tgz#1e7580a23c083897d2ad662459e7efd8465f608a" + integrity sha1-HnWAojwIOJfSrWYkWefv2EZfYIo= + ci-info@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-2.0.0.tgz#67a9e964be31a51e15e5010d58e6f12834002f46" @@ -1713,6 +1755,13 @@ dedent@^0.7.0: resolved "https://registry.yarnpkg.com/dedent/-/dedent-0.7.0.tgz#2495ddbaf6eb874abb0e1be9df22d2e5a544326c" integrity sha1-JJXduvbrh0q7Dhvp3yLS5aVEMmw= +deep-eql@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/deep-eql/-/deep-eql-3.0.1.tgz#dfc9404400ad1c8fe023e7da1df1c147c4b444df" + integrity sha512-+QeIQyN5ZuO+3Uk5DYh6/1eKO0m0YmJFGNmFHGACpf1ClL1nmlV/p4gNgbl2pJGxgXb4faqo6UE+M5ACEMyVcw== + dependencies: + type-detect "^4.0.0" + deep-is@~0.1.3: version "0.1.3" resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" @@ -1787,6 +1836,11 @@ diff@3.5.0: resolved "https://registry.yarnpkg.com/diff/-/diff-3.5.0.tgz#800c0dd1e0a8bfbc95835c202ad220fe317e5a12" integrity sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA== +diff@^4.0.1: + version "4.0.1" + resolved 
"https://registry.yarnpkg.com/diff/-/diff-4.0.1.tgz#0c667cb467ebbb5cea7f14f135cc2dba7780a8ff" + integrity sha512-s2+XdvhPCOF01LRQBC8hf4vhbVmI2CGS5aZnxLJlT5FtdhPCDFq80q++zK2KlrVorVDdL5BOGZ/VfLrVtYNF+Q== + dir-glob@^2.2.2: version "2.2.2" resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-2.2.2.tgz#fa09f0694153c8918b18ba0deafae94769fc50c4" @@ -2416,6 +2470,11 @@ get-caller-file@^2.0.1: resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== +get-func-name@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/get-func-name/-/get-func-name-2.0.0.tgz#ead774abee72e20409433a066366023dd6887a41" + integrity sha1-6td0q+5y4gQJQzoGY2YCPdaIekE= + get-pkg-repo@^1.0.0: version "1.4.0" resolved "https://registry.yarnpkg.com/get-pkg-repo/-/get-pkg-repo-1.4.0.tgz#c73b489c06d80cc5536c2c853f9e05232056972d" @@ -3388,6 +3447,11 @@ make-dir@^2.1.0: pify "^4.0.1" semver "^5.6.0" +make-error@^1.1.1: + version "1.3.5" + resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.5.tgz#efe4e81f6db28cadd605c70f29c831b58ef776c8" + integrity sha512-c3sIjNUow0+8swNwVpqoH4YCShKNFkMaw6oH1mNS2haDZQqkeZFlHS3dhoeEbKKmJB4vXpJucU6oH75aDYeE9g== + make-fetch-happen@^5.0.0: version "5.0.2" resolved "https://registry.yarnpkg.com/make-fetch-happen/-/make-fetch-happen-5.0.2.tgz#aa8387104f2687edca01c8687ee45013d02d19bd" @@ -4202,6 +4266,11 @@ path-type@^3.0.0: dependencies: pify "^3.0.0" +pathval@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/pathval/-/pathval-1.1.0.tgz#b942e6d4bde653005ef6b71361def8727d0645e0" + integrity sha1-uULm1L3mUwBe9rcTYd74cn0GReA= + performance-now@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" @@ -4886,6 +4955,14 @@ source-map-resolve@^0.5.0: source-map-url 
"^0.4.0" urix "^0.1.0" +source-map-support@^0.5.6: + version "0.5.16" + resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.16.tgz#0ae069e7fe3ba7538c64c98515e35339eac5a042" + integrity sha512-efyLRJDr68D9hBBNIPWFjhpFzURh+KJykQwvMyW5UiZzYwoF6l4YMMDIJJEyFWxWCqfyxLzz6tSfUFR+kXXsVQ== + dependencies: + buffer-from "^1.0.0" + source-map "^0.6.0" + source-map-url@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.0.tgz#3e935d7ddd73631b97659956d55128e87b5084a3" @@ -4896,7 +4973,7 @@ source-map@^0.5.6: resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= -source-map@^0.6.1, source-map@~0.6.1: +source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== @@ -5297,6 +5374,17 @@ trim-off-newlines@^1.0.0: resolved "https://registry.yarnpkg.com/trim-off-newlines/-/trim-off-newlines-1.0.1.tgz#9f9ba9d9efa8764c387698bcbfeb2c848f11adb3" integrity sha1-n5up2e+odkw4dpi8v+sshI8RrbM= +ts-node@^8.5.4: + version "8.5.4" + resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-8.5.4.tgz#a152add11fa19c221d0b48962c210cf467262ab2" + integrity sha512-izbVCRV68EasEPQ8MSIGBNK9dc/4sYJJKYA+IarMQct1RtEot6Xp0bXuClsbUSnKpg50ho+aOAx8en5c+y4OFw== + dependencies: + arg "^4.1.0" + diff "^4.0.1" + make-error "^1.1.1" + source-map-support "^0.5.6" + yn "^3.0.0" + tslib@^1.9.0: version "1.10.0" resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.10.0.tgz#c3c19f95973fb0a62973fb09d90d961ee43e5c8a" @@ -5321,6 +5409,11 @@ type-check@~0.3.2: dependencies: prelude-ls "~1.1.2" +type-detect@^4.0.0, type-detect@^4.0.5: + version "4.0.8" + resolved 
"https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" + integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== + type-fest@^0.3.0: version "0.3.1" resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.3.1.tgz#63d00d204e059474fe5e1b7c011112bbd1dc29e1" @@ -5336,6 +5429,11 @@ typedarray@^0.0.6: resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" integrity sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c= +typescript@^3.7.3: + version "3.7.3" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.7.3.tgz#b36840668a16458a7025b9eabfad11b66ab85c69" + integrity sha512-Mcr/Qk7hXqFBXMN7p7Lusj1ktCBydylfQM/FZCk5glCNQJrCUKPkMHdo9R0MTFWsC/4kPFvDS0fDPvukfCkFsw== + uglify-js@^3.1.4: version "3.7.2" resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.7.2.tgz#cb1a601e67536e9ed094a92dd1e333459643d3f9" @@ -5659,3 +5757,8 @@ yargs@^14.2.2: which-module "^2.0.0" y18n "^4.0.0" yargs-parser "^15.0.0" + +yn@^3.0.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50" + integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q== From d2cad384520458674e56c396a8f1afa28f7b9fe5 Mon Sep 17 00:00:00 2001 From: "Brian M. 
Carlson" Date: Thu, 19 Dec 2019 07:39:04 -0600 Subject: [PATCH 02/10] Dont use experimental parser yet --- packages/pg/bench.js | 1 + packages/pg/lib/connection-fast.js | 46 +++++++++++++++++++++--------- 2 files changed, 33 insertions(+), 14 deletions(-) diff --git a/packages/pg/bench.js b/packages/pg/bench.js index b650177e2..7a7084aee 100644 --- a/packages/pg/bench.js +++ b/packages/pg/bench.js @@ -36,6 +36,7 @@ const run = async () => { const queries = await bench(client, seconds * 1000); console.log("queries:", queries); console.log("qps", queries / seconds); + console.log("on my laptop best so far seen 713 qps") await client.end(); }; diff --git a/packages/pg/lib/connection-fast.js b/packages/pg/lib/connection-fast.js index 5ad38f166..aea9eacd4 100644 --- a/packages/pg/lib/connection-fast.js +++ b/packages/pg/lib/connection-fast.js @@ -15,8 +15,6 @@ var Writer = require('buffer-writer') var Reader = require('packet-reader') var PacketStream = require('pg-packet-stream') -console.log(PacketStream.hello()) - var TEXT_MODE = 0 var BINARY_MODE = 1 console.log('using faster connection') @@ -122,25 +120,45 @@ Connection.prototype.connect = function (port, host) { Connection.prototype.attachListeners = function (stream) { var self = this - const mode = this._mode === TEXT_MODE ? 'text' : 'binary'; - const packetStream = new PacketStream.PgPacketStream({ mode }) - packetStream.on('message', (msg) => { - self.emit(msg.name, msg) - }) - stream.pipe(packetStream).on('data', (packet) => { - // console.log('buff', packet) - var msg = self.parseMessage(packet) - var eventName = msg.name === 'error' ? 'errorMessage' : msg.name - if (self._emitMessage) { - self.emit('message', msg) + stream.on('data', function (buff) { + self._reader.addChunk(buff) + var packet = self._reader.read() + while (packet) { + var msg = self.parseMessage({ code: self._reader.header, length: packet.length + 4, buffer: packet }) + var eventName = msg.name === 'error' ? 
'errorMessage' : msg.name + if (self._emitMessage) { + self.emit('message', msg) + } + self.emit(eventName, msg) + packet = self._reader.read() } - self.emit(eventName, msg) }) stream.on('end', function () { self.emit('end') }) } +// Connection.prototype.attachListeners = function (stream) { +// var self = this +// const mode = this._mode === TEXT_MODE ? 'text' : 'binary'; +// const packetStream = new PacketStream.PgPacketStream({ mode }) +// packetStream.on('message', (msg) => { +// self.emit(msg.name, msg) +// }) +// stream.pipe(packetStream).on('data', (packet) => { +// // console.log('buff', packet) +// var msg = self.parseMessage(packet) +// var eventName = msg.name === 'error' ? 'errorMessage' : msg.name +// if (self._emitMessage) { +// self.emit('message', msg) +// } +// self.emit(eventName, msg) +// }) +// stream.on('end', function () { +// self.emit('end') +// }) +// } + Connection.prototype.requestSsl = function () { var bodyBuffer = this.writer .addInt16(0x04d2) From fa44905b3068ccd289ed7f7ee733d4cd94cc3de9 Mon Sep 17 00:00:00 2001 From: "Brian M. 
Carlson" Date: Thu, 19 Dec 2019 14:33:37 -0600 Subject: [PATCH 03/10] port over some tests --- .../src/inbound-parser.test.ts | 477 ++++++++++++++++++ packages/pg-packet-stream/src/index.ts | 338 ++++++++++++- .../src/testing/buffer-list.ts | 75 +++ .../src/testing/test-buffers.ts | 151 ++++++ 4 files changed, 1020 insertions(+), 21 deletions(-) create mode 100644 packages/pg-packet-stream/src/inbound-parser.test.ts create mode 100644 packages/pg-packet-stream/src/testing/buffer-list.ts create mode 100644 packages/pg-packet-stream/src/testing/test-buffers.ts diff --git a/packages/pg-packet-stream/src/inbound-parser.test.ts b/packages/pg-packet-stream/src/inbound-parser.test.ts new file mode 100644 index 000000000..bdfb8a3b1 --- /dev/null +++ b/packages/pg-packet-stream/src/inbound-parser.test.ts @@ -0,0 +1,477 @@ +import buffers from './testing/test-buffers' +import BufferList from './testing/buffer-list' +import { PgPacketStream } from './' +import assert from 'assert' + +var authOkBuffer = buffers.authenticationOk() +var paramStatusBuffer = buffers.parameterStatus('client_encoding', 'UTF8') +var readyForQueryBuffer = buffers.readyForQuery() +var backendKeyDataBuffer = buffers.backendKeyData(1, 2) +var commandCompleteBuffer = buffers.commandComplete('SELECT 3') +var parseCompleteBuffer = buffers.parseComplete() +var bindCompleteBuffer = buffers.bindComplete() +var portalSuspendedBuffer = buffers.portalSuspended() + +var addRow = function (bufferList: BufferList, name: string, offset: number) { + return bufferList.addCString(name) // field name + .addInt32(offset++) // table id + .addInt16(offset++) // attribute of column number + .addInt32(offset++) // objectId of field's data type + .addInt16(offset++) // datatype size + .addInt32(offset++) // type modifier + .addInt16(0) // format code, 0 => text +} + +var row1 = { + name: 'id', + tableID: 1, + attributeNumber: 2, + dataTypeID: 3, + dataTypeSize: 4, + typeModifier: 5, + formatCode: 0 +} +var oneRowDescBuff = 
buffers.rowDescription([row1]) +row1.name = 'bang' + +var twoRowBuf = buffers.rowDescription([row1, { + name: 'whoah', + tableID: 10, + attributeNumber: 11, + dataTypeID: 12, + dataTypeSize: 13, + typeModifier: 14, + formatCode: 0 +}]) + +var emptyRowFieldBuf = new BufferList() + .addInt16(0) + .join(true, 'D') + +var emptyRowFieldBuf = buffers.dataRow([]) + +var oneFieldBuf = new BufferList() + .addInt16(1) // number of fields + .addInt32(5) // length of bytes of fields + .addCString('test') + .join(true, 'D') + +var oneFieldBuf = buffers.dataRow(['test']) + +var expectedAuthenticationOkayMessage = { + name: 'authenticationOk', + length: 8 +} + +var expectedParameterStatusMessage = { + name: 'parameterStatus', + parameterName: 'client_encoding', + parameterValue: 'UTF8', + length: 25 +} + +var expectedBackendKeyDataMessage = { + name: 'backendKeyData', + processID: 1, + secretKey: 2 +} + +var expectedReadyForQueryMessage = { + name: 'readyForQuery', + length: 5, + status: 'I' +} + +var expectedCommandCompleteMessage = { + name: 'commandComplete', + length: 13, + text: 'SELECT 3' +} +var emptyRowDescriptionBuffer = new BufferList() + .addInt16(0) // number of fields + .join(true, 'T') + +var expectedEmptyRowDescriptionMessage = { + name: 'rowDescription', + length: 6, + fieldCount: 0, + fields: [], +} +var expectedOneRowMessage = { + name: 'rowDescription', + length: 27, + fieldCount: 1, + fields: [{ + name: 'id', + tableID: 1, + columnID: 2, + dataTypeID: 3, + dataTypeSize: 4, + dataTypeModifier: 5, + format: 'text' + }] +} + +var expectedTwoRowMessage = { + name: 'rowDescription', + length: 53, + fieldCount: 2, + fields: [{ + name: 'bang', + tableID: 1, + columnID: 2, + dataTypeID: 3, + dataTypeSize: 4, + dataTypeModifier: 5, + format: 'text' + }, + { + name: 'whoah', + tableID: 10, + columnID: 11, + dataTypeID: 12, + dataTypeSize: 13, + dataTypeModifier: 14, + format: 'text' + }] +} + +var testForMessage = function (buffer: Buffer, expectedMessage: any) { + 
it('recieves and parses ' + expectedMessage.name, async () => { + const parser = new PgPacketStream(); + + await new Promise((resolve) => { + let lastMessage: any = {} + parser.on('message', function (msg) { + lastMessage = msg + }) + + parser.write(buffer); + + for (const key in expectedMessage) { + assert.deepEqual(lastMessage[key], expectedMessage[key]) + } + resolve(); + }) + }) +} + +var plainPasswordBuffer = buffers.authenticationCleartextPassword() +var md5PasswordBuffer = buffers.authenticationMD5Password() +var SASLBuffer = buffers.authenticationSASL() +var SASLContinueBuffer = buffers.authenticationSASLContinue() +var SASLFinalBuffer = buffers.authenticationSASLFinal() + +var expectedPlainPasswordMessage = { + name: 'authenticationCleartextPassword' +} + +var expectedMD5PasswordMessage = { + name: 'authenticationMD5Password', + salt: Buffer.from([1, 2, 3, 4]) +} + +var expectedSASLMessage = { + name: 'authenticationSASL', + mechanisms: ['SCRAM-SHA-256'] +} + +var expectedSASLContinueMessage = { + name: 'authenticationSASLContinue', + data: 'data', +} + +var expectedSASLFinalMessage = { + name: 'authenticationSASLFinal', + data: 'data', +} + +var notificationResponseBuffer = buffers.notification(4, 'hi', 'boom') +var expectedNotificationResponseMessage = { + name: 'notification', + processId: 4, + channel: 'hi', + payload: 'boom' +} + +describe('PgPacketStream', function () { + testForMessage(authOkBuffer, expectedAuthenticationOkayMessage) + testForMessage(plainPasswordBuffer, expectedPlainPasswordMessage) + testForMessage(md5PasswordBuffer, expectedMD5PasswordMessage) + testForMessage(SASLBuffer, expectedSASLMessage) + testForMessage(SASLContinueBuffer, expectedSASLContinueMessage) + testForMessage(SASLFinalBuffer, expectedSASLFinalMessage) + + testForMessage(paramStatusBuffer, expectedParameterStatusMessage) + testForMessage(backendKeyDataBuffer, expectedBackendKeyDataMessage) + testForMessage(readyForQueryBuffer, expectedReadyForQueryMessage) + 
testForMessage(commandCompleteBuffer, expectedCommandCompleteMessage) + testForMessage(notificationResponseBuffer, expectedNotificationResponseMessage) + testForMessage(buffers.emptyQuery(), { + name: 'emptyQuery', + length: 4, + }) + + testForMessage(Buffer.from([0x6e, 0, 0, 0, 4]), { + name: 'noData' + }) + + describe('rowDescription messages', function () { + testForMessage(emptyRowDescriptionBuffer, expectedEmptyRowDescriptionMessage) + testForMessage(oneRowDescBuff, expectedOneRowMessage) + testForMessage(twoRowBuf, expectedTwoRowMessage) + }) + + describe('parsing rows', function () { + describe('parsing empty row', function () { + testForMessage(emptyRowFieldBuf, { + name: 'dataRow', + fieldCount: 0 + }) + }) + + describe('parsing data row with fields', function () { + testForMessage(oneFieldBuf, { + name: 'dataRow', + fieldCount: 1, + fields: ['test'] + }) + }) + }) + + describe('notice message', function () { + // this uses the same logic as error message + var buff = buffers.notice([{ type: 'C', value: 'code' }]) + testForMessage(buff, { + name: 'notice', + code: 'code' + }) + }) + + testForMessage(buffers.error([]), { + name: 'error' + }) + + describe('with all the fields', function () { + var buffer = buffers.error([{ + type: 'S', + value: 'ERROR' + }, { + type: 'C', + value: 'code' + }, { + type: 'M', + value: 'message' + }, { + type: 'D', + value: 'details' + }, { + type: 'H', + value: 'hint' + }, { + type: 'P', + value: '100' + }, { + type: 'p', + value: '101' + }, { + type: 'q', + value: 'query' + }, { + type: 'W', + value: 'where' + }, { + type: 'F', + value: 'file' + }, { + type: 'L', + value: 'line' + }, { + type: 'R', + value: 'routine' + }, { + type: 'Z', // ignored + value: 'alsdkf' + }]) + + testForMessage(buffer, { + name: 'error', + severity: 'ERROR', + code: 'code', + message: 'message', + detail: 'details', + hint: 'hint', + position: '100', + internalPosition: '101', + internalQuery: 'query', + where: 'where', + file: 'file', + line: 
'line', + routine: 'routine' + }) + }) + + testForMessage(parseCompleteBuffer, { + name: 'parseComplete' + }) + + testForMessage(bindCompleteBuffer, { + name: 'bindComplete' + }) + + testForMessage(bindCompleteBuffer, { + name: 'bindComplete' + }) + + testForMessage(buffers.closeComplete(), { + name: 'closeComplete' + }) + + describe('parses portal suspended message', function () { + testForMessage(portalSuspendedBuffer, { + name: 'portalSuspended' + }) + }) + + describe('parses replication start message', function () { + testForMessage(Buffer.from([0x57, 0x00, 0x00, 0x00, 0x04]), { + name: 'replicationStart', + length: 4 + }) + }) + + + // since the data message on a stream can randomly divide the incomming + // tcp packets anywhere, we need to make sure we can parse every single + // split on a tcp message + describe('split buffer, single message parsing', function () { + var fullBuffer = buffers.dataRow([null, 'bang', 'zug zug', null, '!']) + + const parse = (buffers: Buffer[]): Promise => { + return new Promise((resolve) => { + const parser = new PgPacketStream(); + parser.once('message', (msg) => { + resolve(msg) + }) + for (const buffer of buffers) { + parser.write(buffer); + } + parser.end() + }) + } + + it('parses when full buffer comes in', async function () { + const message = await parse([fullBuffer]); + assert.equal(message.fields.length, 5) + assert.equal(message.fields[0], null) + assert.equal(message.fields[1], 'bang') + assert.equal(message.fields[2], 'zug zug') + assert.equal(message.fields[3], null) + assert.equal(message.fields[4], '!') + }) + + var testMessageRecievedAfterSpiltAt = async function (split: number) { + var firstBuffer = Buffer.alloc(fullBuffer.length - split) + var secondBuffer = Buffer.alloc(fullBuffer.length - firstBuffer.length) + fullBuffer.copy(firstBuffer, 0, 0) + fullBuffer.copy(secondBuffer, 0, firstBuffer.length) + const message = await parse([firstBuffer, secondBuffer]); + assert.equal(message.fields.length, 5) + 
assert.equal(message.fields[0], null) + assert.equal(message.fields[1], 'bang') + assert.equal(message.fields[2], 'zug zug') + assert.equal(message.fields[3], null) + assert.equal(message.fields[4], '!') + } + + it('parses when split in the middle', function () { + testMessageRecievedAfterSpiltAt(6) + }) + + it('parses when split at end', function () { + testMessageRecievedAfterSpiltAt(2) + }) + + it('parses when split at beginning', function () { + testMessageRecievedAfterSpiltAt(fullBuffer.length - 2) + testMessageRecievedAfterSpiltAt(fullBuffer.length - 1) + testMessageRecievedAfterSpiltAt(fullBuffer.length - 5) + }) + }) + + describe('split buffer, multiple message parsing', function () { + var dataRowBuffer = buffers.dataRow(['!']) + var readyForQueryBuffer = buffers.readyForQuery() + var fullBuffer = Buffer.alloc(dataRowBuffer.length + readyForQueryBuffer.length) + dataRowBuffer.copy(fullBuffer, 0, 0) + readyForQueryBuffer.copy(fullBuffer, dataRowBuffer.length, 0) + + const parse = (buffers: Buffer[]): Promise => { + return new Promise((resolve) => { + const parser = new PgPacketStream(); + const results: any[] = [] + parser.on('message', (msg) => { + results.push(msg) + if (results.length === 2) { + resolve(results) + } + }) + for (const buffer of buffers) { + parser.write(buffer); + } + parser.end() + }) + } + + var verifyMessages = function (messages: any[]) { + assert.strictEqual(messages.length, 2) + assert.deepEqual(messages[0], { + name: 'dataRow', + fieldCount: 1, + length: 11, + fields: ['!'] + }) + assert.equal(messages[0].fields[0], '!') + assert.deepEqual(messages[1], { + name: 'readyForQuery', + length: 5, + status: 'I' + }) + } + // sanity check + it('recieves both messages when packet is not split', async function () { + const messages = await parse([fullBuffer]) + verifyMessages(messages) + }) + + var splitAndVerifyTwoMessages = async function (split: number) { + var firstBuffer = Buffer.alloc(fullBuffer.length - split) + var secondBuffer = 
Buffer.alloc(fullBuffer.length - firstBuffer.length) + fullBuffer.copy(firstBuffer, 0, 0) + fullBuffer.copy(secondBuffer, 0, firstBuffer.length) + const messages = await parse([firstBuffer, secondBuffer]) + verifyMessages(messages) + } + + describe('recieves both messages when packet is split', function () { + it('in the middle', function () { + return splitAndVerifyTwoMessages(11) + }) + it('at the front', function () { + return Promise.all([ + splitAndVerifyTwoMessages(fullBuffer.length - 1), + splitAndVerifyTwoMessages(fullBuffer.length - 4), + splitAndVerifyTwoMessages(fullBuffer.length - 6) + ]) + }) + + it('at the end', function () { + return Promise.all([ + splitAndVerifyTwoMessages(8), + splitAndVerifyTwoMessages(1) + ]) + }) + }) + }) + +}) diff --git a/packages/pg-packet-stream/src/index.ts b/packages/pg-packet-stream/src/index.ts index 95f0e82c6..4c03d9874 100644 --- a/packages/pg-packet-stream/src/index.ts +++ b/packages/pg-packet-stream/src/index.ts @@ -15,17 +15,12 @@ export type Packet = { type FieldFormat = "text" | "binary" -class Field { - constructor(public name: string) { - - } - -} - const emptyBuffer = Buffer.allocUnsafe(0); class BufferReader { private buffer: Buffer = emptyBuffer; + // TODO(bmc): support non-utf8 encoding + private encoding: string = 'utf-8'; constructor(private offset: number = 0) { } @@ -48,12 +43,19 @@ class BufferReader { } public string(length: number): string { - // TODO(bmc): support non-utf8 encoding - const result = this.buffer.toString('utf8', this.offset, this.offset + length) + const result = this.buffer.toString(this.encoding, this.offset, this.offset + length) this.offset += length; return result; } + public cstring(): string { + var start = this.offset + var end = this.buffer.indexOf(0, start) + this.offset = end + 1 + return this.buffer.toString(this.encoding, start, end) + + } + public bytes(length: number): Buffer { const result = this.buffer.slice(this.offset, this.offset + length); this.offset += length; 
@@ -82,20 +84,60 @@ const closeComplete = { length: 5, } +const noData = { + name: 'noData', + length: 5 +} + +const portalSuspended = { + name: 'portalSuspended', + length: 5, +} + +const replicationStart = { + name: 'replicationStart', + length: 4, +} + +const emptyQuery = { + name: 'emptyQuery', + length: 4, +} + +enum MessageCodes { + DataRow = 0x44, // D + ParseComplete = 0x31, // 1 + BindComplete = 0x32, // 2 + CloseComplete = 0x33, // 3 + CommandComplete = 0x43, // C + ReadyForQuery = 0x5a, // Z + NoData = 0x6e, // n + NotificationResponse = 0x41, // A + AuthenticationResponse = 0x52, // R + ParameterStatus = 0x53, // S + BackendKeyData = 0x4b, // K + ErrorMessage = 0x45, // E + NoticeMessage = 0x4e, // N + RowDescriptionMessage = 0x54, // T + PortalSuspended = 0x73, // s + ReplicationStart = 0x57, // W + EmptyQuery = 0x49, // I +} + export class PgPacketStream extends Transform { private remainingBuffer: Buffer = emptyBuffer; private reader = new BufferReader(); private mode: Mode; - constructor(opts: StreamOptions) { + constructor(opts?: StreamOptions) { super({ ...opts, readableObjectMode: true }) - if (opts.mode === 'binary') { + if (opts?.mode === 'binary') { throw new Error('Binary mode not supported yet') } - this.mode = opts.mode; + this.mode = opts?.mode || 'text'; } public _transform(buffer: Buffer, encoding: string, callback: TransformCallback) { @@ -111,7 +153,7 @@ export class PgPacketStream extends Transform { const fullMessageLength = CODE_LENGTH + length; if (fullMessageLength + offset <= combinedBuffer.byteLength) { - this.handlePacket(offset, code, length, combinedBuffer); + this.handlePacket(offset + CODE_LENGTH + LEN_LENGTH, code, length, combinedBuffer); offset += fullMessageLength; } else { break; @@ -127,22 +169,62 @@ export class PgPacketStream extends Transform { callback(null); } - private handlePacket(offset: number, code: number, length: number, combinedBuffer: Buffer) { + private handlePacket(offset: number, code: number, length: 
number, bytes: Buffer) { switch (code) { - case 0x44: // D - this.parseDataRowMessage(offset, length, combinedBuffer); + case MessageCodes.DataRow: + this.parseDataRowMessage(offset, length, bytes); break; - case 0x32: // 2 + case MessageCodes.BindComplete: this.emit('message', bindComplete); break; - case 0x31: // 1 + case MessageCodes.ParseComplete: this.emit('message', parseComplete); break; - case 0x33: // 3 + case MessageCodes.CloseComplete: this.emit('message', closeComplete); break; + case MessageCodes.NoData: + this.emit('message', noData); + break; + case MessageCodes.PortalSuspended: + this.emit('message', portalSuspended); + break; + case MessageCodes.CommandComplete: + this.parseCommandCompleteMessage(offset, length, bytes); + break; + case MessageCodes.ReplicationStart: + this.emit('message', replicationStart); + break; + case MessageCodes.EmptyQuery: + this.emit('message', emptyQuery); + break; + case MessageCodes.ReadyForQuery: + this.parseReadyForQueryMessage(offset, length, bytes); + break; + case MessageCodes.NotificationResponse: + this.parseNotificationMessage(offset, length, bytes); + break; + case MessageCodes.AuthenticationResponse: + this.parseAuthenticationResponse(offset, length, bytes); + break; + case MessageCodes.ParameterStatus: + this.parseParameterStatusMessage(offset, length, bytes); + break; + case MessageCodes.BackendKeyData: + this.parseBackendKeyData(offset, length, bytes); + break; + case MessageCodes.ErrorMessage: + this.parseErrorMessage(offset, length, bytes, 'error'); + break; + case MessageCodes.NoticeMessage: + this.parseErrorMessage(offset, length, bytes, 'notice'); + break; + case MessageCodes.RowDescriptionMessage: + this.parseRowDescriptionMessage(offset, length, bytes); + break; default: - const packet = combinedBuffer.slice(offset, CODE_LENGTH + length + offset) + throw new Error('unhanled code: ' + code.toString(16)) + const packet = bytes.slice(offset, CODE_LENGTH + length + offset) this.push({ code, length, 
packet, buffer: packet.slice(5) }) } } @@ -150,8 +232,52 @@ export class PgPacketStream extends Transform { public _flush(callback: TransformCallback) { } + private parseReadyForQueryMessage(offset: number, length: number, bytes: Buffer) { + this.reader.setBuffer(offset, bytes); + const status = this.reader.string(1); + const message = new ReadyForQueryMessage(length, status) + this.emit('message', message) + } + + private parseCommandCompleteMessage(offset: number, length: number, bytes: Buffer) { + this.reader.setBuffer(offset, bytes); + const text = this.reader.cstring(); + const message = new CommandCompleteMessage(length, text); + this.emit('message', message) + } + + private parseNotificationMessage(offset: number, length: number, bytes: Buffer) { + this.reader.setBuffer(offset, bytes); + const processId = this.reader.int32(); + const channel = this.reader.cstring(); + const payload = this.reader.cstring(); + const message = new NotificationResponseMessage(length, processId, channel, payload); + this.emit('message', message) + } + + private parseRowDescriptionMessage(offset: number, length: number, bytes: Buffer) { + this.reader.setBuffer(offset, bytes); + const fieldCount = this.reader.int16() + const message = new RowDescriptionMessage(length, fieldCount); + for (let i = 0; i < fieldCount; i++) { + message.fields[i] = this.parseField() + } + this.emit('message', message); + } + + private parseField(): Field { + const name = this.reader.cstring() + const tableID = this.reader.int32() + const columnID = this.reader.int16() + const dataTypeID = this.reader.int32() + const dataTypeSize = this.reader.int16() + const dataTypeModifier = this.reader.int32() + const mode = this.reader.int16() === 0 ? 
'text' : 'binary'; + return new Field(name, tableID, columnID, dataTypeID, dataTypeSize, dataTypeModifier, mode) + } + private parseDataRowMessage(offset: number, length: number, bytes: Buffer) { - this.reader.setBuffer(offset + 5, bytes); + this.reader.setBuffer(offset, bytes); const fieldCount = this.reader.int16(); const fields: any[] = new Array(fieldCount); for (let i = 0; i < fieldCount; i++) { @@ -165,6 +291,176 @@ export class PgPacketStream extends Transform { const message = new DataRowMessage(length, fields); this.emit('message', message); } + + private parseParameterStatusMessage(offset: number, length: number, bytes: Buffer) { + this.reader.setBuffer(offset, bytes); + const name = this.reader.cstring(); + const value = this.reader.cstring() + const msg = new ParameterStatusMessage(length, name, value) + this.emit('message', msg) + } + + private parseBackendKeyData(offset: number, length: number, bytes: Buffer) { + this.reader.setBuffer(offset, bytes); + const processID = this.reader.int32() + const secretKey = this.reader.int32() + const msg = new BackendKeyDataMessage(length, processID, secretKey) + this.emit('message', msg) + } + + + public parseAuthenticationResponse(offset: number, length: number, bytes: Buffer) { + this.reader.setBuffer(offset, bytes); + const code = this.reader.int32() + // TODO(bmc): maybe better types here + const msg: any = { + name: 'authenticationOk', + length, + }; + + switch (code) { + case 0: // AuthenticationOk + break; + case 3: // AuthenticationCleartextPassword + if (msg.length === 8) { + msg.name = 'authenticationCleartextPassword' + } + break + case 5: // AuthenticationMD5Password + if (msg.length === 12) { + msg.name = 'authenticationMD5Password' + msg.salt = this.reader.bytes(4); + } + break + case 10: // AuthenticationSASL + msg.name = 'authenticationSASL' + msg.mechanisms = [] + let mechanism: string; + do { + mechanism = this.reader.cstring() + + if (mechanism) { + msg.mechanisms.push(mechanism) + } + } while 
(mechanism) + break; + case 11: // AuthenticationSASLContinue + msg.name = 'authenticationSASLContinue' + msg.data = this.reader.string(length - 4) + break; + case 12: // AuthenticationSASLFinal + msg.name = 'authenticationSASLFinal' + msg.data = this.reader.string(length - 4) + break; + default: + throw new Error('Unknown authenticationOk message type ' + code) + } + this.emit('message', msg) + } + + private parseErrorMessage(offset: number, length: number, bytes: Buffer, name: string) { + this.reader.setBuffer(offset, bytes); + var fields: Record = {} + var fieldType = this.reader.string(1) + while (fieldType !== '\0') { + fields[fieldType] = this.reader.cstring() + fieldType = this.reader.string(1) + } + + // the msg is an Error instance + var msg = new DatabaseError(fields.M, length, name) + + msg.severity = fields.S + msg.code = fields.C + msg.detail = fields.D + msg.hint = fields.H + msg.position = fields.P + msg.internalPosition = fields.p + msg.internalQuery = fields.q + msg.where = fields.W + msg.schema = fields.s + msg.table = fields.t + msg.column = fields.c + msg.dataType = fields.d + msg.constraint = fields.n + msg.file = fields.F + msg.line = fields.L + msg.routine = fields.R + this.emit('message', msg); + + } +} + +class DatabaseError extends Error { + public severity: string | undefined; + public code: string | undefined; + public detail: string | undefined; + public hint: string | undefined; + public position: string | undefined; + public internalPosition: string | undefined; + public internalQuery: string | undefined; + public where: string | undefined; + public schema: string | undefined; + public table: string | undefined; + public column: string | undefined; + public dataType: string | undefined; + public constraint: string | undefined; + public file: string | undefined; + public line: string | undefined; + public routine: string | undefined; + constructor(message: string, public readonly length: number, public readonly name: string) { + 
super(message) + } +} + +class Field { + constructor(public readonly name: string, public readonly tableID: number, public readonly columnID: number, public readonly dataTypeID: number, public readonly dataTypeSize: number, public readonly dataTypeModifier: number, public readonly format: FieldFormat) { + + } + +} + +class RowDescriptionMessage { + public readonly name: string = 'rowDescription'; + public readonly fields: Field[]; + constructor(public readonly length: number, public readonly fieldCount: number) { + this.fields = new Array(this.fieldCount) + } +} + +class ParameterStatusMessage { + public readonly name: string = 'parameterStatus'; + constructor(public readonly length: number, public readonly parameterName: string, public readonly parameterValue: string) { + + } +} + +class BackendKeyDataMessage { + public readonly name: string = 'backendKeyData'; + constructor(public readonly length: number, public readonly processID: number, public readonly secretKey: number) { + + } +} + +class NotificationResponseMessage { + public readonly name: string = 'notification'; + constructor(public readonly length: number, public readonly processId: number, public readonly channel: string, public readonly payload: string) { + + } +} + +class ReadyForQueryMessage { + public readonly name: string = 'readyForQuery'; + constructor(public readonly length: number, public readonly status: string) { + + } +} + +class CommandCompleteMessage { + public readonly name: string = 'commandComplete' + constructor(public readonly length: number, public readonly text: string) { + + } } diff --git a/packages/pg-packet-stream/src/testing/buffer-list.ts b/packages/pg-packet-stream/src/testing/buffer-list.ts new file mode 100644 index 000000000..6487ea0b3 --- /dev/null +++ b/packages/pg-packet-stream/src/testing/buffer-list.ts @@ -0,0 +1,75 @@ +export default class BufferList { + constructor(public buffers: Buffer[] = []) { + + } + + public add(buffer: Buffer, front?: boolean) { + 
this.buffers[front ? 'unshift' : 'push'](buffer) + return this + } + + public addInt16(val: number, front?: boolean) { + return this.add(Buffer.from([(val >>> 8), (val >>> 0)]), front) + } + + public getByteLength(initial?: number) { + return this.buffers.reduce(function (previous, current) { + return previous + current.length + }, initial || 0) + } + + public addInt32(val: number, first?: boolean) { + return this.add(Buffer.from([ + (val >>> 24 & 0xFF), + (val >>> 16 & 0xFF), + (val >>> 8 & 0xFF), + (val >>> 0 & 0xFF) + ]), first) + } + + public addCString(val: string, front?: boolean) { + var len = Buffer.byteLength(val) + var buffer = Buffer.alloc(len + 1) + buffer.write(val) + buffer[len] = 0 + return this.add(buffer, front) + } + + public addString(val: string, front?: boolean) { + var len = Buffer.byteLength(val) + var buffer = Buffer.alloc(len) + buffer.write(val) + return this.add(buffer, front) + } + + public addChar(char: string, first?: boolean) { + return this.add(Buffer.from(char, 'utf8'), first) + } + + public join(appendLength?: boolean, char?: string): Buffer { + var length = this.getByteLength() + if (appendLength) { + this.addInt32(length + 4, true) + return this.join(false, char) + } + if (char) { + this.addChar(char, true) + length++ + } + var result = Buffer.alloc(length) + var index = 0 + this.buffers.forEach(function (buffer) { + buffer.copy(result, index, 0) + index += buffer.length + }) + return result + } + + public static concat(): Buffer { + var total = new BufferList() + for (var i = 0; i < arguments.length; i++) { + total.add(arguments[i]) + } + return total.join() + } +} diff --git a/packages/pg-packet-stream/src/testing/test-buffers.ts b/packages/pg-packet-stream/src/testing/test-buffers.ts new file mode 100644 index 000000000..e0c71e023 --- /dev/null +++ b/packages/pg-packet-stream/src/testing/test-buffers.ts @@ -0,0 +1,151 @@ +// http://developer.postgresql.org/pgdocs/postgres/protocol-message-formats.html +import BufferList from 
'./buffer-list' + +const buffers = { + readyForQuery: function () { + return new BufferList() + .add(Buffer.from('I')) + .join(true, 'Z') + }, + + authenticationOk: function () { + return new BufferList() + .addInt32(0) + .join(true, 'R') + }, + + authenticationCleartextPassword: function () { + return new BufferList() + .addInt32(3) + .join(true, 'R') + }, + + authenticationMD5Password: function () { + return new BufferList() + .addInt32(5) + .add(Buffer.from([1, 2, 3, 4])) + .join(true, 'R') + }, + + authenticationSASL: function () { + return new BufferList() + .addInt32(10) + .addCString('SCRAM-SHA-256') + .addCString('') + .join(true, 'R') + }, + + authenticationSASLContinue: function () { + return new BufferList() + .addInt32(11) + .addString('data') + .join(true, 'R') + }, + + authenticationSASLFinal: function () { + return new BufferList() + .addInt32(12) + .addString('data') + .join(true, 'R') + }, + + parameterStatus: function (name: string, value: string) { + return new BufferList() + .addCString(name) + .addCString(value) + .join(true, 'S') + }, + + backendKeyData: function (processID: number, secretKey: number) { + return new BufferList() + .addInt32(processID) + .addInt32(secretKey) + .join(true, 'K') + }, + + commandComplete: function (string: string) { + return new BufferList() + .addCString(string) + .join(true, 'C') + }, + + rowDescription: function (fields: any[]) { + fields = fields || [] + var buf = new BufferList() + buf.addInt16(fields.length) + fields.forEach(function (field) { + buf.addCString(field.name) + .addInt32(field.tableID || 0) + .addInt16(field.attributeNumber || 0) + .addInt32(field.dataTypeID || 0) + .addInt16(field.dataTypeSize || 0) + .addInt32(field.typeModifier || 0) + .addInt16(field.formatCode || 0) + }) + return buf.join(true, 'T') + }, + + dataRow: function (columns: any[]) { + columns = columns || [] + var buf = new BufferList() + buf.addInt16(columns.length) + columns.forEach(function (col) { + if (col == null) { + 
buf.addInt32(-1) + } else { + var strBuf = Buffer.from(col, 'utf8') + buf.addInt32(strBuf.length) + buf.add(strBuf) + } + }) + return buf.join(true, 'D') + }, + + error: function (fields: any) { + return buffers.errorOrNotice(fields).join(true, 'E') + }, + + notice: function (fields: any) { + return buffers.errorOrNotice(fields).join(true, 'N') + }, + + errorOrNotice: function (fields: any) { + fields = fields || [] + var buf = new BufferList() + fields.forEach(function (field: any) { + buf.addChar(field.type) + buf.addCString(field.value) + }) + return buf.add(Buffer.from([0]))// terminator + }, + + parseComplete: function () { + return new BufferList().join(true, '1') + }, + + bindComplete: function () { + return new BufferList().join(true, '2') + }, + + notification: function (id: number, channel: string, payload: string) { + return new BufferList() + .addInt32(id) + .addCString(channel) + .addCString(payload) + .join(true, 'A') + }, + + emptyQuery: function () { + return new BufferList().join(true, 'I') + }, + + portalSuspended: function () { + return new BufferList().join(true, 's') + }, + + closeComplete: function () { + return new BufferList().join(true, '3') + } +} + +export default buffers From e500479382c12b661605b2e7f246e2474701e821 Mon Sep 17 00:00:00 2001 From: "Brian M. 
Carlson" Date: Thu, 19 Dec 2019 14:41:05 -0600 Subject: [PATCH 04/10] Add streaming parser --- .gitignore | 1 + packages/pg-packet-stream/src/index.ts | 15 ++-------- packages/pg/lib/connection-fast.js | 41 +++++++++++++------------- 3 files changed, 25 insertions(+), 32 deletions(-) diff --git a/.gitignore b/.gitignore index 56eba3953..df95fda07 100644 --- a/.gitignore +++ b/.gitignore @@ -6,3 +6,4 @@ node_modules/ package-lock.json *.swp dist +.DS_Store diff --git a/packages/pg-packet-stream/src/index.ts b/packages/pg-packet-stream/src/index.ts index 4c03d9874..adc158d6d 100644 --- a/packages/pg-packet-stream/src/index.ts +++ b/packages/pg-packet-stream/src/index.ts @@ -1,11 +1,9 @@ import { Transform, TransformCallback, TransformOptions } from 'stream'; -import assert from 'assert' -export const hello = () => 'Hello world!' - -// this is a single byte +// every message is prefixed with a single bye const CODE_LENGTH = 1; -// this is a Uint32 +// every message has an int32 length which includes itself but does +// NOT include the code in the length const LEN_LENGTH = 4; export type Packet = { @@ -415,9 +413,7 @@ class DatabaseError extends Error { class Field { constructor(public readonly name: string, public readonly tableID: number, public readonly columnID: number, public readonly dataTypeID: number, public readonly dataTypeSize: number, public readonly dataTypeModifier: number, public readonly format: FieldFormat) { - } - } class RowDescriptionMessage { @@ -438,32 +434,27 @@ class ParameterStatusMessage { class BackendKeyDataMessage { public readonly name: string = 'backendKeyData'; constructor(public readonly length: number, public readonly processID: number, public readonly secretKey: number) { - } } class NotificationResponseMessage { public readonly name: string = 'notification'; constructor(public readonly length: number, public readonly processId: number, public readonly channel: string, public readonly payload: string) { - } } class 
ReadyForQueryMessage { public readonly name: string = 'readyForQuery'; constructor(public readonly length: number, public readonly status: string) { - } } class CommandCompleteMessage { public readonly name: string = 'commandComplete' constructor(public readonly length: number, public readonly text: string) { - } } - class DataRowMessage { public readonly fieldCount: number; public readonly name: string = 'dataRow' diff --git a/packages/pg/lib/connection-fast.js b/packages/pg/lib/connection-fast.js index aea9eacd4..58e63dac4 100644 --- a/packages/pg/lib/connection-fast.js +++ b/packages/pg/lib/connection-fast.js @@ -138,26 +138,27 @@ Connection.prototype.attachListeners = function (stream) { }) } -// Connection.prototype.attachListeners = function (stream) { -// var self = this -// const mode = this._mode === TEXT_MODE ? 'text' : 'binary'; -// const packetStream = new PacketStream.PgPacketStream({ mode }) -// packetStream.on('message', (msg) => { -// self.emit(msg.name, msg) -// }) -// stream.pipe(packetStream).on('data', (packet) => { -// // console.log('buff', packet) -// var msg = self.parseMessage(packet) -// var eventName = msg.name === 'error' ? 'errorMessage' : msg.name -// if (self._emitMessage) { -// self.emit('message', msg) -// } -// self.emit(eventName, msg) -// }) -// stream.on('end', function () { -// self.emit('end') -// }) -// } +Connection.prototype.attachListeners = function (stream) { + var self = this + const mode = this._mode === TEXT_MODE ? 'text' : 'binary'; + const packetStream = new PacketStream.PgPacketStream({ mode }) + packetStream.on('message', (msg) => { + var eventName = msg.name === 'error' ? 'errorMessage' : msg.name + self.emit(eventName, msg) + }) + stream.pipe(packetStream).on('data', (packet) => { + // console.log('buff', packet) + var msg = self.parseMessage(packet) + var eventName = msg.name === 'error' ? 
'errorMessage' : msg.name + if (self._emitMessage) { + self.emit('message', msg) + } + self.emit(eventName, msg) + }) + stream.on('end', function () { + self.emit('end') + }) +} Connection.prototype.requestSsl = function () { var bodyBuffer = this.writer From a7c70a9acf835ad1ab3eb44d476077cd6a9ec554 Mon Sep 17 00:00:00 2001 From: "Brian M. Carlson" Date: Thu, 19 Dec 2019 17:44:00 -0600 Subject: [PATCH 05/10] All tests passing --- .../src/inbound-parser.test.ts | 41 ++++++++++++ packages/pg-packet-stream/src/index.test.ts | 2 +- packages/pg-packet-stream/src/index.ts | 66 +++++++++++++++++++ .../src/testing/buffer-list.ts | 4 ++ .../src/testing/test-buffers.ts | 32 +++++++++ 5 files changed, 144 insertions(+), 1 deletion(-) diff --git a/packages/pg-packet-stream/src/inbound-parser.test.ts b/packages/pg-packet-stream/src/inbound-parser.test.ts index bdfb8a3b1..b5f2eab6f 100644 --- a/packages/pg-packet-stream/src/inbound-parser.test.ts +++ b/packages/pg-packet-stream/src/inbound-parser.test.ts @@ -340,6 +340,47 @@ describe('PgPacketStream', function () { }) }) + describe('copy', () => { + testForMessage(buffers.copyIn(0), { + name: 'copyInResponse', + length: 7, + binary: false, + columnTypes: [] + }) + + testForMessage(buffers.copyIn(2), { + name: 'copyInResponse', + length: 11, + binary: false, + columnTypes: [0, 1] + }) + + testForMessage(buffers.copyOut(0), { + name: 'copyOutResponse', + length: 7, + binary: false, + columnTypes: [] + }) + + testForMessage(buffers.copyOut(3), { + name: 'copyOutResponse', + length: 13, + binary: false, + columnTypes: [0, 1, 2] + }) + + testForMessage(buffers.copyDone(), { + name: 'copyDone', + length: 4, + }) + + testForMessage(buffers.copyData(Buffer.from([5, 6, 7])), { + name: 'copyData', + length: 7, + chunk: Buffer.from([5, 6, 7]) + }) + }) + // since the data message on a stream can randomly divide the incomming // tcp packets anywhere, we need to make sure we can parse every single diff --git 
a/packages/pg-packet-stream/src/index.test.ts b/packages/pg-packet-stream/src/index.test.ts index f5be4e2a0..1962329c5 100644 --- a/packages/pg-packet-stream/src/index.test.ts +++ b/packages/pg-packet-stream/src/index.test.ts @@ -29,7 +29,7 @@ const emptyMessage = Buffer.from([0x0a, 0x00, 0x00, 0x00, 0x04]) const oneByteMessage = Buffer.from([0x0b, 0x00, 0x00, 0x00, 0x05, 0x0a]) const bigMessage = Buffer.from([0x0f, 0x00, 0x00, 0x00, 0x14, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e0, 0x0f]) -describe('PgPacketStream', () => { +describe.skip('PgPacketStream', () => { it('should chunk a perfect input packet', async () => { const stream = new PgPacketStream() stream.write(Buffer.from([0x01, 0x00, 0x00, 0x00, 0x04])) diff --git a/packages/pg-packet-stream/src/index.ts b/packages/pg-packet-stream/src/index.ts index adc158d6d..c7baa6d0b 100644 --- a/packages/pg-packet-stream/src/index.ts +++ b/packages/pg-packet-stream/src/index.ts @@ -34,6 +34,12 @@ class BufferReader { return result; } + public byte() { + const result = this.buffer[this.offset]; + this.offset++; + return result; + } + public int32() { const result = this.buffer.readInt32BE(this.offset); this.offset += 4; @@ -102,6 +108,11 @@ const emptyQuery = { length: 4, } +const copyDone = { + name: 'copyDone', + length: 4, +} + enum MessageCodes { DataRow = 0x44, // D ParseComplete = 0x31, // 1 @@ -120,6 +131,10 @@ enum MessageCodes { PortalSuspended = 0x73, // s ReplicationStart = 0x57, // W EmptyQuery = 0x49, // I + CopyIn = 0x47, // G + CopyOut = 0x48, // H + CopyDone = 0x63, // c + CopyData = 0x64, // d } export class PgPacketStream extends Transform { @@ -187,6 +202,9 @@ export class PgPacketStream extends Transform { case MessageCodes.PortalSuspended: this.emit('message', portalSuspended); break; + case MessageCodes.CopyDone: + this.emit('message', copyDone); + break; case MessageCodes.CommandComplete: this.parseCommandCompleteMessage(offset, length, bytes); 
break; @@ -220,6 +238,15 @@ export class PgPacketStream extends Transform { case MessageCodes.RowDescriptionMessage: this.parseRowDescriptionMessage(offset, length, bytes); break; + case MessageCodes.CopyIn: + this.parseCopyInMessage(offset, length, bytes); + break; + case MessageCodes.CopyOut: + this.parseCopyOutMessage(offset, length, bytes); + break; + case MessageCodes.CopyData: + this.parseCopyData(offset, length, bytes); + break; default: throw new Error('unhanled code: ' + code.toString(16)) const packet = bytes.slice(offset, CODE_LENGTH + length + offset) @@ -244,6 +271,31 @@ export class PgPacketStream extends Transform { this.emit('message', message) } + private parseCopyData(offset: number, length: number, bytes: Buffer) { + const chunk = bytes.slice(offset, offset + (length - 4)); + const message = new CopyDataMessage(length, chunk); + this.emit('message', message) + } + + private parseCopyInMessage(offset: number, length: number, bytes: Buffer) { + this.parseCopyMessage(offset, length, bytes, 'copyInResponse') + } + + private parseCopyOutMessage(offset: number, length: number, bytes: Buffer) { + this.parseCopyMessage(offset, length, bytes, 'copyOutResponse') + } + + private parseCopyMessage(offset: number, length: number, bytes: Buffer, messageName: string) { + this.reader.setBuffer(offset, bytes); + const isBinary = this.reader.byte() !== 0; + const columnCount = this.reader.int16() + const message = new CopyResponse(length, messageName, isBinary, columnCount); + for (let i = 0; i < columnCount; i++) { + message.columnTypes[i] = this.reader.int16(); + } + this.emit('message', message); + } + private parseNotificationMessage(offset: number, length: number, bytes: Buffer) { this.reader.setBuffer(offset, bytes); const processId = this.reader.int32(); @@ -411,6 +463,20 @@ class DatabaseError extends Error { } } +class CopyDataMessage { + public readonly name = 'copyData'; + constructor(public readonly length: number, public readonly chunk: Buffer) { + + } 
+} + +class CopyResponse { + public readonly columnTypes: number[]; + constructor(public readonly length: number, public readonly name: string, public readonly binary: boolean, columnCount: number) { + this.columnTypes = new Array(columnCount); + } +} + class Field { constructor(public readonly name: string, public readonly tableID: number, public readonly columnID: number, public readonly dataTypeID: number, public readonly dataTypeSize: number, public readonly dataTypeModifier: number, public readonly format: FieldFormat) { } diff --git a/packages/pg-packet-stream/src/testing/buffer-list.ts b/packages/pg-packet-stream/src/testing/buffer-list.ts index 6487ea0b3..51812bce4 100644 --- a/packages/pg-packet-stream/src/testing/buffer-list.ts +++ b/packages/pg-packet-stream/src/testing/buffer-list.ts @@ -46,6 +46,10 @@ export default class BufferList { return this.add(Buffer.from(char, 'utf8'), first) } + public addByte(byte: number) { + return this.add(Buffer.from([byte])) + } + public join(appendLength?: boolean, char?: string): Buffer { var length = this.getByteLength() if (appendLength) { diff --git a/packages/pg-packet-stream/src/testing/test-buffers.ts b/packages/pg-packet-stream/src/testing/test-buffers.ts index e0c71e023..0594eaadc 100644 --- a/packages/pg-packet-stream/src/testing/test-buffers.ts +++ b/packages/pg-packet-stream/src/testing/test-buffers.ts @@ -145,6 +145,38 @@ const buffers = { closeComplete: function () { return new BufferList().join(true, '3') + }, + + copyIn: function (cols: number) { + const list = new BufferList() + // text mode + .addByte(0) + // column count + .addInt16(cols); + for (let i = 0; i < cols; i++) { + list.addInt16(i); + } + return list.join(true, 'G') + }, + + copyOut: function (cols: number) { + const list = new BufferList() + // text mode + .addByte(0) + // column count + .addInt16(cols); + for (let i = 0; i < cols; i++) { + list.addInt16(i); + } + return list.join(true, 'H') + }, + + copyData: function (bytes: Buffer) { + 
return new BufferList().add(bytes).join(true, 'd'); + }, + + copyDone: function () { + return new BufferList().join(true, 'c') } } From b0be9da9863a957f38fd3914551afa91d39bf5ff Mon Sep 17 00:00:00 2001 From: "Brian M. Carlson" Date: Thu, 19 Dec 2019 18:57:48 -0600 Subject: [PATCH 06/10] Cleanup --- packages/pg-packet-stream/src/BufferReader.ts | 44 +++ .../src/inbound-parser.test.ts | 66 ++-- packages/pg-packet-stream/src/index.ts | 368 ++++-------------- packages/pg-packet-stream/src/messages.ts | 134 +++++++ packages/pg/bench.js | 41 +- packages/pg/lib/connection-fast.js | 368 +----------------- packages/pg/lib/result.js | 12 +- 7 files changed, 332 insertions(+), 701 deletions(-) create mode 100644 packages/pg-packet-stream/src/BufferReader.ts create mode 100644 packages/pg-packet-stream/src/messages.ts diff --git a/packages/pg-packet-stream/src/BufferReader.ts b/packages/pg-packet-stream/src/BufferReader.ts new file mode 100644 index 000000000..9729d919f --- /dev/null +++ b/packages/pg-packet-stream/src/BufferReader.ts @@ -0,0 +1,44 @@ +const emptyBuffer = Buffer.allocUnsafe(0); + +export class BufferReader { + private buffer: Buffer = emptyBuffer; + // TODO(bmc): support non-utf8 encoding + private encoding: string = 'utf-8'; + constructor(private offset: number = 0) { + } + public setBuffer(offset: number, buffer: Buffer): void { + this.offset = offset; + this.buffer = buffer; + } + public int16() { + const result = this.buffer.readInt16BE(this.offset); + this.offset += 2; + return result; + } + public byte() { + const result = this.buffer[this.offset]; + this.offset++; + return result; + } + public int32() { + const result = this.buffer.readInt32BE(this.offset); + this.offset += 4; + return result; + } + public string(length: number): string { + const result = this.buffer.toString(this.encoding, this.offset, this.offset + length); + this.offset += length; + return result; + } + public cstring(): string { + var start = this.offset; + var end = 
this.buffer.indexOf(0, start); + this.offset = end + 1; + return this.buffer.toString(this.encoding, start, end); + } + public bytes(length: number): Buffer { + const result = this.buffer.slice(this.offset, this.offset + length); + this.offset += length; + return result; + } +} diff --git a/packages/pg-packet-stream/src/inbound-parser.test.ts b/packages/pg-packet-stream/src/inbound-parser.test.ts index b5f2eab6f..098f41242 100644 --- a/packages/pg-packet-stream/src/inbound-parser.test.ts +++ b/packages/pg-packet-stream/src/inbound-parser.test.ts @@ -2,6 +2,7 @@ import buffers from './testing/test-buffers' import BufferList from './testing/buffer-list' import { PgPacketStream } from './' import assert from 'assert' +import { Readable } from 'stream' var authOkBuffer = buffers.authenticationOk() var paramStatusBuffer = buffers.parameterStatus('client_encoding', 'UTF8') @@ -136,23 +137,25 @@ var expectedTwoRowMessage = { }] } +const concat = (stream: Readable): Promise => { + return new Promise((resolve) => { + const results: any[] = [] + stream.on('data', item => results.push(item)) + stream.on('end', () => resolve(results)) + }) +} + var testForMessage = function (buffer: Buffer, expectedMessage: any) { it('recieves and parses ' + expectedMessage.name, async () => { const parser = new PgPacketStream(); + parser.write(buffer); + parser.end(); + const [lastMessage] = await concat(parser); - await new Promise((resolve) => { - let lastMessage: any = {} - parser.on('message', function (msg) { - lastMessage = msg - }) - - parser.write(buffer); + for (const key in expectedMessage) { + assert.deepEqual(lastMessage[key], expectedMessage[key]) + } - for (const key in expectedMessage) { - assert.deepEqual(lastMessage[key], expectedMessage[key]) - } - resolve(); - }) }) } @@ -388,17 +391,14 @@ describe('PgPacketStream', function () { describe('split buffer, single message parsing', function () { var fullBuffer = buffers.dataRow([null, 'bang', 'zug zug', null, '!']) - const 
parse = (buffers: Buffer[]): Promise => { - return new Promise((resolve) => { - const parser = new PgPacketStream(); - parser.once('message', (msg) => { - resolve(msg) - }) - for (const buffer of buffers) { - parser.write(buffer); - } - parser.end() - }) + const parse = async (buffers: Buffer[]): Promise => { + const parser = new PgPacketStream(); + for (const buffer of buffers) { + parser.write(buffer); + } + parser.end() + const [msg] = await concat(parser) + return msg; } it('parses when full buffer comes in', async function () { @@ -448,20 +448,12 @@ describe('PgPacketStream', function () { readyForQueryBuffer.copy(fullBuffer, dataRowBuffer.length, 0) const parse = (buffers: Buffer[]): Promise => { - return new Promise((resolve) => { - const parser = new PgPacketStream(); - const results: any[] = [] - parser.on('message', (msg) => { - results.push(msg) - if (results.length === 2) { - resolve(results) - } - }) - for (const buffer of buffers) { - parser.write(buffer); - } - parser.end() - }) + const parser = new PgPacketStream(); + for (const buffer of buffers) { + parser.write(buffer); + } + parser.end() + return concat(parser) } var verifyMessages = function (messages: any[]) { diff --git a/packages/pg-packet-stream/src/index.ts b/packages/pg-packet-stream/src/index.ts index c7baa6d0b..dc2af4246 100644 --- a/packages/pg-packet-stream/src/index.ts +++ b/packages/pg-packet-stream/src/index.ts @@ -1,4 +1,7 @@ import { Transform, TransformCallback, TransformOptions } from 'stream'; +import { Mode, bindComplete, parseComplete, closeComplete, noData, portalSuspended, copyDone, replicationStart, emptyQuery, ReadyForQueryMessage, CommandCompleteMessage, CopyDataMessage, CopyResponse, NotificationResponseMessage, RowDescriptionMessage, Field, DataRowMessage, ParameterStatusMessage, BackendKeyDataMessage, DatabaseError, BackendMessage } from './messages'; +import { BufferReader } from './BufferReader'; +import assert from 'assert' // every message is prefixed with a 
single bye const CODE_LENGTH = 1; @@ -6,114 +9,20 @@ const CODE_LENGTH = 1; // NOT include the code in the length const LEN_LENGTH = 4; +const HEADER_LENGTH = CODE_LENGTH + LEN_LENGTH; + export type Packet = { code: number; packet: Buffer; } -type FieldFormat = "text" | "binary" - const emptyBuffer = Buffer.allocUnsafe(0); -class BufferReader { - private buffer: Buffer = emptyBuffer; - // TODO(bmc): support non-utf8 encoding - private encoding: string = 'utf-8'; - constructor(private offset: number = 0) { - - } - - public setBuffer(offset: number, buffer: Buffer): void { - this.offset = offset; - this.buffer = buffer; - } - - public int16() { - const result = this.buffer.readInt16BE(this.offset); - this.offset += 2; - return result; - } - - public byte() { - const result = this.buffer[this.offset]; - this.offset++; - return result; - } - - public int32() { - const result = this.buffer.readInt32BE(this.offset); - this.offset += 4; - return result; - } - - public string(length: number): string { - const result = this.buffer.toString(this.encoding, this.offset, this.offset + length) - this.offset += length; - return result; - } - - public cstring(): string { - var start = this.offset - var end = this.buffer.indexOf(0, start) - this.offset = end + 1 - return this.buffer.toString(this.encoding, start, end) - - } - - public bytes(length: number): Buffer { - const result = this.buffer.slice(this.offset, this.offset + length); - this.offset += length; - return result - } -} - -type Mode = 'text' | 'binary'; - type StreamOptions = TransformOptions & { mode: Mode } -const parseComplete = { - name: 'parseComplete', - length: 5, -}; - -const bindComplete = { - name: 'bindComplete', - length: 5, -} - -const closeComplete = { - name: 'closeComplete', - length: 5, -} - -const noData = { - name: 'noData', - length: 5 -} - -const portalSuspended = { - name: 'portalSuspended', - length: 5, -} - -const replicationStart = { - name: 'replicationStart', - length: 4, -} - -const 
emptyQuery = { - name: 'emptyQuery', - length: 4, -} - -const copyDone = { - name: 'copyDone', - length: 4, -} - -enum MessageCodes { +const enum MessageCodes { DataRow = 0x44, // D ParseComplete = 0x31, // 1 BindComplete = 0x32, // 2 @@ -154,9 +63,9 @@ export class PgPacketStream extends Transform { } public _transform(buffer: Buffer, encoding: string, callback: TransformCallback) { - const combinedBuffer = this.remainingBuffer.byteLength ? Buffer.concat([this.remainingBuffer, buffer], this.remainingBuffer.length + buffer.length) : buffer; + const combinedBuffer: Buffer = this.remainingBuffer.byteLength ? Buffer.concat([this.remainingBuffer, buffer], this.remainingBuffer.length + buffer.length) : buffer; let offset = 0; - while ((offset + CODE_LENGTH + LEN_LENGTH) <= combinedBuffer.byteLength) { + while ((offset + HEADER_LENGTH) <= combinedBuffer.byteLength) { // code is 1 byte long - it identifies the message type const code = combinedBuffer[offset]; @@ -166,7 +75,8 @@ export class PgPacketStream extends Transform { const fullMessageLength = CODE_LENGTH + length; if (fullMessageLength + offset <= combinedBuffer.byteLength) { - this.handlePacket(offset + CODE_LENGTH + LEN_LENGTH, code, length, combinedBuffer); + const message = this.handlePacket(offset + HEADER_LENGTH, code, length, combinedBuffer); + this.push(message) offset += fullMessageLength; } else { break; @@ -182,107 +92,82 @@ export class PgPacketStream extends Transform { callback(null); } - private handlePacket(offset: number, code: number, length: number, bytes: Buffer) { + private handlePacket(offset: number, code: number, length: number, bytes: Buffer): BackendMessage { switch (code) { - case MessageCodes.DataRow: - this.parseDataRowMessage(offset, length, bytes); - break; case MessageCodes.BindComplete: - this.emit('message', bindComplete); - break; + return bindComplete; case MessageCodes.ParseComplete: - this.emit('message', parseComplete); - break; + return parseComplete; case 
MessageCodes.CloseComplete: - this.emit('message', closeComplete); - break; + return closeComplete; case MessageCodes.NoData: - this.emit('message', noData); - break; + return noData; case MessageCodes.PortalSuspended: - this.emit('message', portalSuspended); - break; + return portalSuspended; case MessageCodes.CopyDone: - this.emit('message', copyDone); - break; - case MessageCodes.CommandComplete: - this.parseCommandCompleteMessage(offset, length, bytes); - break; + return copyDone; case MessageCodes.ReplicationStart: - this.emit('message', replicationStart); - break; + return replicationStart; case MessageCodes.EmptyQuery: - this.emit('message', emptyQuery); - break; + return emptyQuery; + case MessageCodes.DataRow: + return this.parseDataRowMessage(offset, length, bytes); + case MessageCodes.CommandComplete: + return this.parseCommandCompleteMessage(offset, length, bytes); case MessageCodes.ReadyForQuery: - this.parseReadyForQueryMessage(offset, length, bytes); - break; + return this.parseReadyForQueryMessage(offset, length, bytes); case MessageCodes.NotificationResponse: - this.parseNotificationMessage(offset, length, bytes); - break; + return this.parseNotificationMessage(offset, length, bytes); case MessageCodes.AuthenticationResponse: - this.parseAuthenticationResponse(offset, length, bytes); - break; + return this.parseAuthenticationResponse(offset, length, bytes); case MessageCodes.ParameterStatus: - this.parseParameterStatusMessage(offset, length, bytes); - break; + return this.parseParameterStatusMessage(offset, length, bytes); case MessageCodes.BackendKeyData: - this.parseBackendKeyData(offset, length, bytes); - break; + return this.parseBackendKeyData(offset, length, bytes); case MessageCodes.ErrorMessage: - this.parseErrorMessage(offset, length, bytes, 'error'); - break; + return this.parseErrorMessage(offset, length, bytes, 'error'); case MessageCodes.NoticeMessage: - this.parseErrorMessage(offset, length, bytes, 'notice'); - break; + return 
this.parseErrorMessage(offset, length, bytes, 'notice'); case MessageCodes.RowDescriptionMessage: - this.parseRowDescriptionMessage(offset, length, bytes); - break; + return this.parseRowDescriptionMessage(offset, length, bytes); case MessageCodes.CopyIn: - this.parseCopyInMessage(offset, length, bytes); - break; + return this.parseCopyInMessage(offset, length, bytes); case MessageCodes.CopyOut: - this.parseCopyOutMessage(offset, length, bytes); - break; + return this.parseCopyOutMessage(offset, length, bytes); case MessageCodes.CopyData: - this.parseCopyData(offset, length, bytes); - break; + return this.parseCopyData(offset, length, bytes); default: - throw new Error('unhanled code: ' + code.toString(16)) - const packet = bytes.slice(offset, CODE_LENGTH + length + offset) - this.push({ code, length, packet, buffer: packet.slice(5) }) + assert.fail(`unknown message code: ${code.toString(16)}`) } } public _flush(callback: TransformCallback) { + this._transform(Buffer.alloc(0), 'utf-i', callback) } private parseReadyForQueryMessage(offset: number, length: number, bytes: Buffer) { this.reader.setBuffer(offset, bytes); const status = this.reader.string(1); - const message = new ReadyForQueryMessage(length, status) - this.emit('message', message) + return new ReadyForQueryMessage(length, status) } private parseCommandCompleteMessage(offset: number, length: number, bytes: Buffer) { this.reader.setBuffer(offset, bytes); const text = this.reader.cstring(); - const message = new CommandCompleteMessage(length, text); - this.emit('message', message) + return new CommandCompleteMessage(length, text); } private parseCopyData(offset: number, length: number, bytes: Buffer) { const chunk = bytes.slice(offset, offset + (length - 4)); - const message = new CopyDataMessage(length, chunk); - this.emit('message', message) + return new CopyDataMessage(length, chunk); } private parseCopyInMessage(offset: number, length: number, bytes: Buffer) { - this.parseCopyMessage(offset, length, 
bytes, 'copyInResponse') + return this.parseCopyMessage(offset, length, bytes, 'copyInResponse') } private parseCopyOutMessage(offset: number, length: number, bytes: Buffer) { - this.parseCopyMessage(offset, length, bytes, 'copyOutResponse') + return this.parseCopyMessage(offset, length, bytes, 'copyOutResponse') } private parseCopyMessage(offset: number, length: number, bytes: Buffer, messageName: string) { @@ -293,7 +178,7 @@ export class PgPacketStream extends Transform { for (let i = 0; i < columnCount; i++) { message.columnTypes[i] = this.reader.int16(); } - this.emit('message', message); + return message; } private parseNotificationMessage(offset: number, length: number, bytes: Buffer) { @@ -301,8 +186,7 @@ export class PgPacketStream extends Transform { const processId = this.reader.int32(); const channel = this.reader.cstring(); const payload = this.reader.cstring(); - const message = new NotificationResponseMessage(length, processId, channel, payload); - this.emit('message', message) + return new NotificationResponseMessage(length, processId, channel, payload); } private parseRowDescriptionMessage(offset: number, length: number, bytes: Buffer) { @@ -312,7 +196,7 @@ export class PgPacketStream extends Transform { for (let i = 0; i < fieldCount; i++) { message.fields[i] = this.parseField() } - this.emit('message', message); + return message; } private parseField(): Field { @@ -338,24 +222,21 @@ export class PgPacketStream extends Transform { fields[i] = this.reader.string(len) } } - const message = new DataRowMessage(length, fields); - this.emit('message', message); + return new DataRowMessage(length, fields); } private parseParameterStatusMessage(offset: number, length: number, bytes: Buffer) { this.reader.setBuffer(offset, bytes); const name = this.reader.cstring(); const value = this.reader.cstring() - const msg = new ParameterStatusMessage(length, name, value) - this.emit('message', msg) + return new ParameterStatusMessage(length, name, value) } private 
parseBackendKeyData(offset: number, length: number, bytes: Buffer) { this.reader.setBuffer(offset, bytes); const processID = this.reader.int32() const secretKey = this.reader.int32() - const msg = new BackendKeyDataMessage(length, processID, secretKey) - this.emit('message', msg) + return new BackendKeyDataMessage(length, processID, secretKey) } @@ -363,7 +244,7 @@ export class PgPacketStream extends Transform { this.reader.setBuffer(offset, bytes); const code = this.reader.int32() // TODO(bmc): maybe better types here - const msg: any = { + const message: any = { name: 'authenticationOk', length, }; @@ -372,40 +253,40 @@ export class PgPacketStream extends Transform { case 0: // AuthenticationOk break; case 3: // AuthenticationCleartextPassword - if (msg.length === 8) { - msg.name = 'authenticationCleartextPassword' + if (message.length === 8) { + message.name = 'authenticationCleartextPassword' } break case 5: // AuthenticationMD5Password - if (msg.length === 12) { - msg.name = 'authenticationMD5Password' - msg.salt = this.reader.bytes(4); + if (message.length === 12) { + message.name = 'authenticationMD5Password' + message.salt = this.reader.bytes(4); } break case 10: // AuthenticationSASL - msg.name = 'authenticationSASL' - msg.mechanisms = [] + message.name = 'authenticationSASL' + message.mechanisms = [] let mechanism: string; do { mechanism = this.reader.cstring() if (mechanism) { - msg.mechanisms.push(mechanism) + message.mechanisms.push(mechanism) } } while (mechanism) break; case 11: // AuthenticationSASLContinue - msg.name = 'authenticationSASLContinue' - msg.data = this.reader.string(length - 4) + message.name = 'authenticationSASLContinue' + message.data = this.reader.string(length - 4) break; case 12: // AuthenticationSASLFinal - msg.name = 'authenticationSASLFinal' - msg.data = this.reader.string(length - 4) + message.name = 'authenticationSASLFinal' + message.data = this.reader.string(length - 4) break; default: throw new Error('Unknown 
authenticationOk message type ' + code) } - this.emit('message', msg) + return message; } private parseErrorMessage(offset: number, length: number, bytes: Buffer, name: string) { @@ -418,113 +299,24 @@ export class PgPacketStream extends Transform { } // the msg is an Error instance - var msg = new DatabaseError(fields.M, length, name) - - msg.severity = fields.S - msg.code = fields.C - msg.detail = fields.D - msg.hint = fields.H - msg.position = fields.P - msg.internalPosition = fields.p - msg.internalQuery = fields.q - msg.where = fields.W - msg.schema = fields.s - msg.table = fields.t - msg.column = fields.c - msg.dataType = fields.d - msg.constraint = fields.n - msg.file = fields.F - msg.line = fields.L - msg.routine = fields.R - this.emit('message', msg); - - } -} - -class DatabaseError extends Error { - public severity: string | undefined; - public code: string | undefined; - public detail: string | undefined; - public hint: string | undefined; - public position: string | undefined; - public internalPosition: string | undefined; - public internalQuery: string | undefined; - public where: string | undefined; - public schema: string | undefined; - public table: string | undefined; - public column: string | undefined; - public dataType: string | undefined; - public constraint: string | undefined; - public file: string | undefined; - public line: string | undefined; - public routine: string | undefined; - constructor(message: string, public readonly length: number, public readonly name: string) { - super(message) - } -} - -class CopyDataMessage { - public readonly name = 'copyData'; - constructor(public readonly length: number, public readonly chunk: Buffer) { - - } -} - -class CopyResponse { - public readonly columnTypes: number[]; - constructor(public readonly length: number, public readonly name: string, public readonly binary: boolean, columnCount: number) { - this.columnTypes = new Array(columnCount); - } -} - -class Field { - constructor(public readonly 
name: string, public readonly tableID: number, public readonly columnID: number, public readonly dataTypeID: number, public readonly dataTypeSize: number, public readonly dataTypeModifier: number, public readonly format: FieldFormat) { - } -} - -class RowDescriptionMessage { - public readonly name: string = 'rowDescription'; - public readonly fields: Field[]; - constructor(public readonly length: number, public readonly fieldCount: number) { - this.fields = new Array(this.fieldCount) - } -} - -class ParameterStatusMessage { - public readonly name: string = 'parameterStatus'; - constructor(public readonly length: number, public readonly parameterName: string, public readonly parameterValue: string) { - - } -} - -class BackendKeyDataMessage { - public readonly name: string = 'backendKeyData'; - constructor(public readonly length: number, public readonly processID: number, public readonly secretKey: number) { - } -} - -class NotificationResponseMessage { - public readonly name: string = 'notification'; - constructor(public readonly length: number, public readonly processId: number, public readonly channel: string, public readonly payload: string) { - } -} - -class ReadyForQueryMessage { - public readonly name: string = 'readyForQuery'; - constructor(public readonly length: number, public readonly status: string) { - } -} - -class CommandCompleteMessage { - public readonly name: string = 'commandComplete' - constructor(public readonly length: number, public readonly text: string) { - } -} - -class DataRowMessage { - public readonly fieldCount: number; - public readonly name: string = 'dataRow' - constructor(public length: number, public fields: any[]) { - this.fieldCount = fields.length; + var message = new DatabaseError(fields.M, length, name) + + message.severity = fields.S + message.code = fields.C + message.detail = fields.D + message.hint = fields.H + message.position = fields.P + message.internalPosition = fields.p + message.internalQuery = fields.q + 
message.where = fields.W + message.schema = fields.s + message.table = fields.t + message.column = fields.c + message.dataType = fields.d + message.constraint = fields.n + message.file = fields.F + message.line = fields.L + message.routine = fields.R + return message; } } diff --git a/packages/pg-packet-stream/src/messages.ts b/packages/pg-packet-stream/src/messages.ts new file mode 100644 index 000000000..26013cf13 --- /dev/null +++ b/packages/pg-packet-stream/src/messages.ts @@ -0,0 +1,134 @@ +export type Mode = 'text' | 'binary'; + +export type BackendMessage = { + name: string; + length: number; +} + +export const parseComplete: BackendMessage = { + name: 'parseComplete', + length: 5, +}; + +export const bindComplete: BackendMessage = { + name: 'bindComplete', + length: 5, +} + +export const closeComplete: BackendMessage = { + name: 'closeComplete', + length: 5, +} + +export const noData: BackendMessage = { + name: 'noData', + length: 5 +} + +export const portalSuspended: BackendMessage = { + name: 'portalSuspended', + length: 5, +} + +export const replicationStart: BackendMessage = { + name: 'replicationStart', + length: 4, +} + +export const emptyQuery: BackendMessage = { + name: 'emptyQuery', + length: 4, +} + +export const copyDone: BackendMessage = { + name: 'copyDone', + length: 4, +} + +export class DatabaseError extends Error { + public severity: string | undefined; + public code: string | undefined; + public detail: string | undefined; + public hint: string | undefined; + public position: string | undefined; + public internalPosition: string | undefined; + public internalQuery: string | undefined; + public where: string | undefined; + public schema: string | undefined; + public table: string | undefined; + public column: string | undefined; + public dataType: string | undefined; + public constraint: string | undefined; + public file: string | undefined; + public line: string | undefined; + public routine: string | undefined; + constructor(message: 
string, public readonly length: number, public readonly name: string) { + super(message) + } +} + +export class CopyDataMessage { + public readonly name = 'copyData'; + constructor(public readonly length: number, public readonly chunk: Buffer) { + + } +} + +export class CopyResponse { + public readonly columnTypes: number[]; + constructor(public readonly length: number, public readonly name: string, public readonly binary: boolean, columnCount: number) { + this.columnTypes = new Array(columnCount); + } +} + +export class Field { + constructor(public readonly name: string, public readonly tableID: number, public readonly columnID: number, public readonly dataTypeID: number, public readonly dataTypeSize: number, public readonly dataTypeModifier: number, public readonly format: Mode) { + } +} + +export class RowDescriptionMessage { + public readonly name: string = 'rowDescription'; + public readonly fields: Field[]; + constructor(public readonly length: number, public readonly fieldCount: number) { + this.fields = new Array(this.fieldCount) + } +} + +export class ParameterStatusMessage { + public readonly name: string = 'parameterStatus'; + constructor(public readonly length: number, public readonly parameterName: string, public readonly parameterValue: string) { + + } +} + +export class BackendKeyDataMessage { + public readonly name: string = 'backendKeyData'; + constructor(public readonly length: number, public readonly processID: number, public readonly secretKey: number) { + } +} + +export class NotificationResponseMessage { + public readonly name: string = 'notification'; + constructor(public readonly length: number, public readonly processId: number, public readonly channel: string, public readonly payload: string) { + } +} + +export class ReadyForQueryMessage { + public readonly name: string = 'readyForQuery'; + constructor(public readonly length: number, public readonly status: string) { + } +} + +export class CommandCompleteMessage { + public readonly name: 
string = 'commandComplete' + constructor(public readonly length: number, public readonly text: string) { + } +} + +export class DataRowMessage { + public readonly fieldCount: number; + public readonly name: string = 'dataRow' + constructor(public length: number, public fields: any[]) { + this.fieldCount = fields.length; + } +} diff --git a/packages/pg/bench.js b/packages/pg/bench.js index 7a7084aee..3c12fa683 100644 --- a/packages/pg/bench.js +++ b/packages/pg/bench.js @@ -1,13 +1,22 @@ const pg = require("./lib"); const pool = new pg.Pool() -const q = { +const params = { text: "select typname, typnamespace, typowner, typlen, typbyval, typcategory, typispreferred, typisdefined, typdelim, typrelid, typelem, typarray from pg_type where typtypmod = $1 and typisdefined = $2", values: [-1, true] }; -const exec = async client => { +const insert = { + text: 'INSERT INTO foobar(name, age) VALUES ($1, $2)', + values: ['brian', 100] +} + +const seq = { + text: 'SELECT * FROM generate_series(1, 1000)' +} + +const exec = async (client, q) => { const result = await client.query({ text: q.text, values: q.values, @@ -15,11 +24,11 @@ const exec = async client => { }); }; -const bench = async (client, time) => { +const bench = async (client, q, time) => { let start = Date.now(); let count = 0; while (true) { - await exec(client); + await exec(client, q); count++; if (Date.now() - start > time) { return count; @@ -30,13 +39,29 @@ const bench = async (client, time) => { const run = async () => { const client = new pg.Client(); await client.connect(); - await bench(client, 1000); + await client.query('CREATE TEMP TABLE foobar(name TEXT, age NUMERIC)') + await bench(client, params, 1000); console.log("warmup done"); const seconds = 5; - const queries = await bench(client, seconds * 1000); - console.log("queries:", queries); + + let queries = await bench(client, params, seconds * 1000); + console.log('') + console.log("little queries:", queries); + console.log("qps", queries / seconds); 
+ console.log("on my laptop best so far seen 733 qps") + + console.log('') + queries = await bench(client, seq, seconds * 1000); + console.log("sequence queries:", queries); + console.log("qps", queries / seconds); + console.log("on my laptop best so far seen 1192 qps") + + console.log('') + queries = await bench(client, insert, seconds * 1000); + console.log("insert queries:", queries); console.log("qps", queries / seconds); - console.log("on my laptop best so far seen 713 qps") + console.log("on my laptop best so far seen 5600 qps") + await client.end(); await client.end(); }; diff --git a/packages/pg/lib/connection-fast.js b/packages/pg/lib/connection-fast.js index 58e63dac4..29752df72 100644 --- a/packages/pg/lib/connection-fast.js +++ b/packages/pg/lib/connection-fast.js @@ -12,11 +12,12 @@ var EventEmitter = require('events').EventEmitter var util = require('util') var Writer = require('buffer-writer') -var Reader = require('packet-reader') var PacketStream = require('pg-packet-stream') var TEXT_MODE = 0 -var BINARY_MODE = 1 + +// TODO(bmc) support binary mode here +// var BINARY_MODE = 1 console.log('using faster connection') var Connection = function (config) { EventEmitter.call(this) @@ -36,10 +37,6 @@ var Connection = function (config) { this._ending = false this._mode = TEXT_MODE this._emitMessage = false - this._reader = new Reader({ - headerSize: 1, - lengthPadding: -4 - }) var self = this this.on('newListener', function (eventName) { if (eventName === 'message') { @@ -120,35 +117,10 @@ Connection.prototype.connect = function (port, host) { Connection.prototype.attachListeners = function (stream) { var self = this - stream.on('data', function (buff) { - self._reader.addChunk(buff) - var packet = self._reader.read() - while (packet) { - var msg = self.parseMessage({ code: self._reader.header, length: packet.length + 4, buffer: packet }) - var eventName = msg.name === 'error' ? 
'errorMessage' : msg.name - if (self._emitMessage) { - self.emit('message', msg) - } - self.emit(eventName, msg) - packet = self._reader.read() - } - }) - stream.on('end', function () { - self.emit('end') - }) -} - -Connection.prototype.attachListeners = function (stream) { - var self = this - const mode = this._mode === TEXT_MODE ? 'text' : 'binary'; + const mode = this._mode === TEXT_MODE ? 'text' : 'binary' const packetStream = new PacketStream.PgPacketStream({ mode }) - packetStream.on('message', (msg) => { - var eventName = msg.name === 'error' ? 'errorMessage' : msg.name - self.emit(eventName, msg) - }) - stream.pipe(packetStream).on('data', (packet) => { - // console.log('buff', packet) - var msg = self.parseMessage(packet) + this.stream.pipe(packetStream) + packetStream.on('data', (msg) => { var eventName = msg.name === 'error' ? 'errorMessage' : msg.name if (self._emitMessage) { self.emit('message', msg) @@ -397,332 +369,4 @@ Connection.prototype.sendCopyFail = function (msg) { this._send(0x66) } -var Message = function (name, length) { - this.name = name - this.length = length -} - -Connection.prototype.parseMessage = function (packet) { - this.offset = 0 - const { code, length, buffer } = packet - switch (code) { - case 0x52: // R - return this.parseR(buffer, length) - - case 0x53: // S - return this.parseS(buffer, length) - - case 0x4b: // K - return this.parseK(buffer, length) - - case 0x43: // C - return this.parseC(buffer, length) - - case 0x5a: // Z - return this.parseZ(buffer, length) - - case 0x54: // T - return this.parseT(buffer, length) - - case 0x44: // D - return this.parseD(buffer, length) - - case 0x45: // E - return this.parseE(buffer, length) - - case 0x4e: // N - return this.parseN(buffer, length) - - case 0x31: // 1 - return new Message('parseComplete', length) - - case 0x32: // 2 - return new Message('bindComplete', length) - - case 0x33: // 3 - return new Message('closeComplete', length) - - case 0x41: // A - return 
this.parseA(buffer, length) - - case 0x6e: // n - return new Message('noData', length) - - case 0x49: // I - return new Message('emptyQuery', length) - - case 0x73: // s - return new Message('portalSuspended', length) - - case 0x47: // G - return this.parseG(buffer, length) - - case 0x48: // H - return this.parseH(buffer, length) - - case 0x57: // W - return new Message('replicationStart', length) - - case 0x63: // c - return new Message('copyDone', length) - - case 0x64: // d - return this.parsed(buffer, length) - } - console.log('could not parse', packet) -} - -Connection.prototype.parseR = function (buffer, length) { - var code = this.parseInt32(buffer) - - var msg = new Message('authenticationOk', length) - - switch (code) { - case 0: // AuthenticationOk - return msg - case 3: // AuthenticationCleartextPassword - if (msg.length === 8) { - msg.name = 'authenticationCleartextPassword' - return msg - } - break - case 5: // AuthenticationMD5Password - if (msg.length === 12) { - msg.name = 'authenticationMD5Password' - msg.salt = Buffer.alloc(4) - buffer.copy(msg.salt, 0, this.offset, this.offset + 4) - this.offset += 4 - return msg - } - - break - case 10: // AuthenticationSASL - msg.name = 'authenticationSASL' - msg.mechanisms = [] - do { - var mechanism = this.parseCString(buffer) - - if (mechanism) { - msg.mechanisms.push(mechanism) - } - } while (mechanism) - - return msg - case 11: // AuthenticationSASLContinue - msg.name = 'authenticationSASLContinue' - msg.data = this.readString(buffer, length - 4) - - return msg - case 12: // AuthenticationSASLFinal - msg.name = 'authenticationSASLFinal' - msg.data = this.readString(buffer, length - 4) - - return msg - } - - throw new Error('Unknown authenticationOk message type' + util.inspect(msg)) -} - -Connection.prototype.parseS = function (buffer, length) { - var msg = new Message('parameterStatus', length) - msg.parameterName = this.parseCString(buffer) - msg.parameterValue = this.parseCString(buffer) - return msg -} 
- -Connection.prototype.parseK = function (buffer, length) { - var msg = new Message('backendKeyData', length) - msg.processID = this.parseInt32(buffer) - msg.secretKey = this.parseInt32(buffer) - return msg -} - -Connection.prototype.parseC = function (buffer, length) { - var msg = new Message('commandComplete', length) - msg.text = this.parseCString(buffer) - return msg -} - -Connection.prototype.parseZ = function (buffer, length) { - var msg = new Message('readyForQuery', length) - msg.name = 'readyForQuery' - msg.status = this.readString(buffer, 1) - return msg -} - -var ROW_DESCRIPTION = 'rowDescription' -Connection.prototype.parseT = function (buffer, length) { - var msg = new Message(ROW_DESCRIPTION, length) - msg.fieldCount = this.parseInt16(buffer) - var fields = [] - for (var i = 0; i < msg.fieldCount; i++) { - fields.push(this.parseField(buffer)) - } - msg.fields = fields - return msg -} - -var Field = function () { - this.name = null - this.tableID = null - this.columnID = null - this.dataTypeID = null - this.dataTypeSize = null - this.dataTypeModifier = null - this.format = null -} - -var FORMAT_TEXT = 'text' -var FORMAT_BINARY = 'binary' -Connection.prototype.parseField = function (buffer) { - var field = new Field() - field.name = this.parseCString(buffer) - field.tableID = this.parseInt32(buffer) - field.columnID = this.parseInt16(buffer) - field.dataTypeID = this.parseInt32(buffer) - field.dataTypeSize = this.parseInt16(buffer) - field.dataTypeModifier = this.parseInt32(buffer) - if (this.parseInt16(buffer) === TEXT_MODE) { - this._mode = TEXT_MODE - field.format = FORMAT_TEXT - } else { - this._mode = BINARY_MODE - field.format = FORMAT_BINARY - } - return field -} - -var DATA_ROW = 'dataRow' -var DataRowMessage = function (length, fieldCount) { - this.name = DATA_ROW - this.length = length - this.fieldCount = fieldCount - this.fields = [] -} - -// extremely hot-path code -Connection.prototype.parseD = function (buffer, length) { - var fieldCount 
= this.parseInt16(buffer) - var msg = new DataRowMessage(length, fieldCount) - for (var i = 0; i < fieldCount; i++) { - msg.fields.push(this._readValue(buffer)) - } - return msg -} - -// extremely hot-path code -Connection.prototype._readValue = function (buffer) { - var length = this.parseInt32(buffer) - if (length === -1) return null - if (this._mode === TEXT_MODE) { - return this.readString(buffer, length) - } - return this.readBytes(buffer, length) -} - -// parses error -Connection.prototype.parseE = function (buffer, length) { - var fields = {} - var fieldType = this.readString(buffer, 1) - while (fieldType !== '\0') { - fields[fieldType] = this.parseCString(buffer) - fieldType = this.readString(buffer, 1) - } - - // the msg is an Error instance - var msg = new Error(fields.M) - - // for compatibility with Message - msg.name = 'error' - msg.length = length - - msg.severity = fields.S - msg.code = fields.C - msg.detail = fields.D - msg.hint = fields.H - msg.position = fields.P - msg.internalPosition = fields.p - msg.internalQuery = fields.q - msg.where = fields.W - msg.schema = fields.s - msg.table = fields.t - msg.column = fields.c - msg.dataType = fields.d - msg.constraint = fields.n - msg.file = fields.F - msg.line = fields.L - msg.routine = fields.R - return msg -} - -// same thing, different name -Connection.prototype.parseN = function (buffer, length) { - var msg = this.parseE(buffer, length) - msg.name = 'notice' - return msg -} - -Connection.prototype.parseA = function (buffer, length) { - var msg = new Message('notification', length) - msg.processId = this.parseInt32(buffer) - msg.channel = this.parseCString(buffer) - msg.payload = this.parseCString(buffer) - return msg -} - -Connection.prototype.parseG = function (buffer, length) { - var msg = new Message('copyInResponse', length) - return this.parseGH(buffer, msg) -} - -Connection.prototype.parseH = function (buffer, length) { - var msg = new Message('copyOutResponse', length) - return 
this.parseGH(buffer, msg) -} - -Connection.prototype.parseGH = function (buffer, msg) { - var isBinary = buffer[this.offset] !== 0 - this.offset++ - msg.binary = isBinary - var columnCount = this.parseInt16(buffer) - msg.columnTypes = [] - for (var i = 0; i < columnCount; i++) { - msg.columnTypes.push(this.parseInt16(buffer)) - } - return msg -} - -Connection.prototype.parsed = function (buffer, length) { - var msg = new Message('copyData', length) - msg.chunk = this.readBytes(buffer, msg.length - 4) - return msg -} - -Connection.prototype.parseInt32 = function (buffer) { - var value = buffer.readInt32BE(this.offset) - this.offset += 4 - return value -} - -Connection.prototype.parseInt16 = function (buffer) { - var value = buffer.readInt16BE(this.offset) - this.offset += 2 - return value -} - -Connection.prototype.readString = function (buffer, length) { - return buffer.toString(this.encoding, this.offset, (this.offset += length)) -} - -Connection.prototype.readBytes = function (buffer, length) { - return buffer.slice(this.offset, (this.offset += length)) -} - -Connection.prototype.parseCString = function (buffer) { - var start = this.offset - var end = buffer.indexOf(0, start) - this.offset = end + 1 - return buffer.toString(this.encoding, start, end) -} -// end parsing methods module.exports = Connection diff --git a/packages/pg/lib/result.js b/packages/pg/lib/result.js index 7e59a413e..d959e808a 100644 --- a/packages/pg/lib/result.js +++ b/packages/pg/lib/result.js @@ -16,9 +16,9 @@ var Result = function (rowMode, types) { this.command = null this.rowCount = null this.oid = null - this.rows = []; - this.fields = undefined; - this._parsers = undefined; + this.rows = [] + this.fields = undefined + this._parsers = undefined this._types = types this.RowCtor = null this.rowAsArray = rowMode === 'array' @@ -88,14 +88,14 @@ Result.prototype.addFields = function (fieldDescriptions) { // multiple query statements in 1 action can result in multiple sets // of 
rowDescriptions...eg: 'select NOW(); select 1::int;' // you need to reset the fields - this.fields = fieldDescriptions; + this.fields = fieldDescriptions if (this.fields.length) { - this._parsers = new Array(fieldDescriptions.length); + this._parsers = new Array(fieldDescriptions.length) } for (var i = 0; i < fieldDescriptions.length; i++) { var desc = fieldDescriptions[i] if (this._types) { - this._parsers[i] = this._types.getTypeParser(desc.dataTypeID, desc.format || 'text'); + this._parsers[i] = this._types.getTypeParser(desc.dataTypeID, desc.format || 'text') } else { this._parsers[i] = types.getTypeParser(desc.dataTypeID, desc.format || 'text') } From e03401081162ce71d66346f0669670528797c22a Mon Sep 17 00:00:00 2001 From: "Brian M. Carlson" Date: Thu, 26 Dec 2019 16:45:37 +0000 Subject: [PATCH 07/10] Add docker devcontainer stuff --- .devcontainer/Dockerfile | 68 ++++++++++++++++++++++++++++++++ .devcontainer/devcontainer.json | 31 +++++++++++++++ .devcontainer/docker-compose.yml | 44 +++++++++++++++++++++ 3 files changed, 143 insertions(+) create mode 100644 .devcontainer/Dockerfile create mode 100644 .devcontainer/devcontainer.json create mode 100644 .devcontainer/docker-compose.yml diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile new file mode 100644 index 000000000..c1c782d55 --- /dev/null +++ b/.devcontainer/Dockerfile @@ -0,0 +1,68 @@ +#------------------------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. +#------------------------------------------------------------------------------------------------------------- + +FROM node:12 + +# Avoid warnings by switching to noninteractive +ENV DEBIAN_FRONTEND=noninteractive + +# The node image includes a non-root user with sudo access. 
Use the +# "remoteUser" property in devcontainer.json to use it. On Linux, update +# these values to ensure the container user's UID/GID matches your local values. +# See https://aka.ms/vscode-remote/containers/non-root-user for details. +ARG USERNAME=node +ARG USER_UID=1000 +ARG USER_GID=$USER_UID + +# Configure apt and install packages +RUN apt-get update \ + && apt-get -y install --no-install-recommends apt-utils dialog 2>&1 \ + # + # Verify git and needed tools are installed + && apt-get -y install git iproute2 procps \ + # + # Remove outdated yarn from /opt and install via package + # so it can be easily updated via apt-get upgrade yarn + && rm -rf /opt/yarn-* \ + && rm -f /usr/local/bin/yarn \ + && rm -f /usr/local/bin/yarnpkg \ + && apt-get install -y curl apt-transport-https lsb-release \ + && curl -sS https://dl.yarnpkg.com/$(lsb_release -is | tr '[:upper:]' '[:lower:]')/pubkey.gpg | apt-key add - 2>/dev/null \ + && echo "deb https://dl.yarnpkg.com/$(lsb_release -is | tr '[:upper:]' '[:lower:]')/ stable main" | tee /etc/apt/sources.list.d/yarn.list \ + && apt-get update \ + && apt-get -y install --no-install-recommends yarn tmux locales \ + # + # Install eslint globally + && npm install -g eslint \ + # + # [Optional] Update a non-root user to UID/GID if needed. 
+ && if [ "$USER_GID" != "1000" ] || [ "$USER_UID" != "1000" ]; then \ + groupmod --gid $USER_GID $USERNAME \ + && usermod --uid $USER_UID --gid $USER_GID $USERNAME \ + && chown -R $USER_UID:$USER_GID /home/$USERNAME; \ + fi \ + # [Optional] Add add sudo support for non-root user + && apt-get install -y sudo \ + && echo node ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/$USERNAME \ + && chmod 0440 /etc/sudoers.d/$USERNAME \ + # + # Clean up + && apt-get autoremove -y \ + && apt-get clean -y \ + && rm -rf /var/lib/apt/lists/* + +RUN curl https://raw.githubusercontent.com/brianc/dotfiles/master/.tmux.conf > ~/.tmux.conf + +# install nvm +RUN curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.35.2/install.sh | bash + +# Set the locale +RUN sed -i -e 's/# en_US.UTF-8 UTF-8/en_US.UTF-8 UTF-8/' /etc/locale.gen && locale-gen +ENV LANG en_US.UTF-8 +ENV LANGUAGE en_US:en +ENV LC_ALL en_US.UTF-8 + +# Switch back to dialog for any ad-hoc use of apt-get +ENV DEBIAN_FRONTEND=dialog diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 000000000..14fb67344 --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,31 @@ +// If you want to run as a non-root user in the container, see .devcontainer/docker-compose.yml. +{ + "name": "Node.js 12 & Postgres", + "dockerComposeFile": "docker-compose.yml", + "service": "web", + "workspaceFolder": "/workspace", + + // Use 'settings' to set *default* container specific settings.json values on container create. + // You can edit these settings after create using File > Preferences > Settings > Remote. + "settings": { + "terminal.integrated.shell.linux": "/bin/bash" + }, + + // Uncomment the next line if you want start specific services in your Docker Compose config. + // "runServices": [], + + // Uncomment the line below if you want to keep your containers running after VS Code shuts down. 
+ // "shutdownAction": "none", + + // Uncomment the next line to run commands after the container is created. + // "postCreateCommand": "npm install", + + // Uncomment the next line to have VS Code connect as an existing non-root user in the container. See + // https://aka.ms/vscode-remote/containers/non-root for details on adding a non-root user if none exist. + // "remoteUser": "node", + + // Add the IDs of extensions you want installed when the container is created in the array below. + "extensions": [ + "dbaeumer.vscode-eslint" + ] +} \ No newline at end of file diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml new file mode 100644 index 000000000..184aff0ed --- /dev/null +++ b/.devcontainer/docker-compose.yml @@ -0,0 +1,44 @@ +#------------------------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. +#------------------------------------------------------------------------------------------------------------- + +version: '3' +services: + web: + # Uncomment the next line to use a non-root user for all processes. You can also + # simply use the "remoteUser" property in devcontainer.json if you just want VS Code + # and its sub-processes (terminals, tasks, debugging) to execute as the user. On Linux, + # you may need to update USER_UID and USER_GID in .devcontainer/Dockerfile to match your + # user if not 1000. See https://aka.ms/vscode-remote/containers/non-root for details. + # user: node + + build: + context: . + dockerfile: Dockerfile + + volumes: + - ..:/workspace:cached + + environment: + PGPASSWORD: pass + PGUSER: user + PGDATABASE: data + PGHOST: db + + # Overrides default command so things don't shut down after the process ends. 
+ command: sleep infinity + + links: + - db + + db: + image: postgres + restart: unless-stopped + ports: + - 5432:5432 + environment: + POSTGRES_PASSWORD: pass + POSTGRES_USER: user + POSTGRES_DB: data + From 766e48f34a5efaf52cfdc545230aaccfbb3d5107 Mon Sep 17 00:00:00 2001 From: "Brian M. Carlson" Date: Fri, 27 Dec 2019 02:55:18 +0000 Subject: [PATCH 08/10] Update types & move some configs around --- .devcontainer/Dockerfile | 2 +- .eslintrc | 21 +++- packages/pg-packet-stream/package.json | 15 ++- packages/pg-packet-stream/src/index.test.ts | 103 -------------------- packages/pg-packet-stream/src/index.ts | 40 ++++---- packages/pg-packet-stream/src/messages.ts | 75 ++++++++++---- packages/pg-packet-stream/tsconfig.json | 1 + 7 files changed, 104 insertions(+), 153 deletions(-) delete mode 100644 packages/pg-packet-stream/src/index.test.ts diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index c1c782d55..179bc2250 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -32,7 +32,7 @@ RUN apt-get update \ && curl -sS https://dl.yarnpkg.com/$(lsb_release -is | tr '[:upper:]' '[:lower:]')/pubkey.gpg | apt-key add - 2>/dev/null \ && echo "deb https://dl.yarnpkg.com/$(lsb_release -is | tr '[:upper:]' '[:lower:]')/ stable main" | tee /etc/apt/sources.list.d/yarn.list \ && apt-get update \ - && apt-get -y install --no-install-recommends yarn tmux locales \ + && apt-get -y install --no-install-recommends yarn tmux locales postgresql \ # # Install eslint globally && npm install -g eslint \ diff --git a/.eslintrc b/.eslintrc index 6242db30c..e4ff2e0f0 100644 --- a/.eslintrc +++ b/.eslintrc @@ -1,8 +1,18 @@ { - "plugins": ["node"], - "extends": ["standard", "eslint:recommended", "plugin:node/recommended"], + "plugins": [ + "node" + ], + "extends": [ + "standard", + "eslint:recommended", + "plugin:node/recommended" + ], + "ignorePatterns": [ + "**/*.ts" + ], "parserOptions": { - "ecmaVersion": 2017 + "ecmaVersion": 2017, + "sourceType": 
"module" }, "env": { "node": true, @@ -11,10 +21,13 @@ }, "rules": { "space-before-function-paren": "off", + "node/no-unsupported-features/es-syntax": "off", "node/no-unpublished-require": [ "error", { - "allowModules": ["pg"] + "allowModules": [ + "pg" + ] } ] } diff --git a/packages/pg-packet-stream/package.json b/packages/pg-packet-stream/package.json index 89027056b..06f218756 100644 --- a/packages/pg-packet-stream/package.json +++ b/packages/pg-packet-stream/package.json @@ -2,21 +2,20 @@ "name": "pg-packet-stream", "version": "1.0.0", "main": "dist/index.js", + "types": "dist/index.d.ts", "license": "MIT", "devDependencies": { "@types/node": "^12.12.21", "chunky": "^0.0.0", - "mocha": "^6.2.2", - "typescript": "^3.7.3" - }, - "scripts": { - "test": "mocha -r ts-node/register src/**/*.test.ts" - }, - "dependencies": { + "typescript": "^3.7.3", "@types/chai": "^4.2.7", "@types/mocha": "^5.2.7", "chai": "^4.2.0", "mocha": "^6.2.2", "ts-node": "^8.5.4" - } + }, + "scripts": { + "test": "mocha -r ts-node/register src/**/*.test.ts" + }, + "dependencies": {} } diff --git a/packages/pg-packet-stream/src/index.test.ts b/packages/pg-packet-stream/src/index.test.ts deleted file mode 100644 index 1962329c5..000000000 --- a/packages/pg-packet-stream/src/index.test.ts +++ /dev/null @@ -1,103 +0,0 @@ -import 'mocha'; -import { PgPacketStream, Packet } from './' -import { expect } from 'chai' -import chunky from 'chunky' - -const consume = async (stream: PgPacketStream, count: number): Promise => { - const result: Packet[] = []; - - return new Promise((resolve) => { - const read = () => { - stream.once('readable', () => { - let packet; - while (packet = stream.read()) { - result.push(packet) - } - if (result.length === count) { - resolve(result); - } else { - read() - } - - }) - } - read() - }) -} - -const emptyMessage = Buffer.from([0x0a, 0x00, 0x00, 0x00, 0x04]) -const oneByteMessage = Buffer.from([0x0b, 0x00, 0x00, 0x00, 0x05, 0x0a]) -const bigMessage = Buffer.from([0x0f, 
0x00, 0x00, 0x00, 0x14, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e0, 0x0f]) - -describe.skip('PgPacketStream', () => { - it('should chunk a perfect input packet', async () => { - const stream = new PgPacketStream() - stream.write(Buffer.from([0x01, 0x00, 0x00, 0x00, 0x04])) - stream.end() - const buffers = await consume(stream, 1) - expect(buffers).to.have.length(1) - expect(buffers[0].packet).to.deep.equal(Buffer.from([0x1, 0x00, 0x00, 0x00, 0x04])) - }); - - it('should read 2 chunks into perfect input packet', async () => { - const stream = new PgPacketStream() - stream.write(Buffer.from([0x01, 0x00, 0x00, 0x00, 0x08])) - stream.write(Buffer.from([0x1, 0x2, 0x3, 0x4])) - stream.end() - const buffers = await consume(stream, 1) - expect(buffers).to.have.length(1) - expect(buffers[0].packet).to.deep.equal(Buffer.from([0x1, 0x00, 0x00, 0x00, 0x08, 0x1, 0x2, 0x3, 0x4])) - }); - - it('should read a bunch of big messages', async () => { - const stream = new PgPacketStream(); - let totalBuffer = Buffer.allocUnsafe(0); - const num = 2; - for (let i = 0; i < 2; i++) { - totalBuffer = Buffer.concat([totalBuffer, bigMessage, bigMessage]) - } - const chunks = chunky(totalBuffer) - for (const chunk of chunks) { - stream.write(chunk) - } - stream.end() - const messages = await consume(stream, num * 2) - expect(messages.map(x => x.code)).to.eql(new Array(num * 2).fill(0x0f)) - }) - - it('should read multiple messages in a single chunk', async () => { - const stream = new PgPacketStream() - stream.write(Buffer.from([0x01, 0x00, 0x00, 0x00, 0x04, 0x01, 0x00, 0x00, 0x00, 0x04])) - stream.end() - const buffers = await consume(stream, 2) - expect(buffers).to.have.length(2) - expect(buffers[0].packet).to.deep.equal(Buffer.from([0x1, 0x00, 0x00, 0x00, 0x04])) - expect(buffers[1].packet).to.deep.equal(Buffer.from([0x1, 0x00, 0x00, 0x00, 0x04])) - }); - - it('should read multiple chunks into multiple packets', async () => { - const stream = 
new PgPacketStream() - stream.write(Buffer.from([0x01, 0x00, 0x00, 0x00, 0x05, 0x0a, 0x01, 0x00, 0x00, 0x00, 0x05, 0x0b])) - stream.write(Buffer.from([0x01, 0x00, 0x00])); - stream.write(Buffer.from([0x00, 0x06, 0x0c, 0x0d, 0x03, 0x00, 0x00, 0x00, 0x04])) - stream.end() - const buffers = await consume(stream, 4) - expect(buffers).to.have.length(4) - expect(buffers[0].packet).to.deep.equal(Buffer.from([0x1, 0x00, 0x00, 0x00, 0x05, 0x0a])) - expect(buffers[1].packet).to.deep.equal(Buffer.from([0x1, 0x00, 0x00, 0x00, 0x05, 0x0b])) - expect(buffers[2].packet).to.deep.equal(Buffer.from([0x1, 0x00, 0x00, 0x00, 0x06, 0x0c, 0x0d])) - expect(buffers[3].packet).to.deep.equal(Buffer.from([0x3, 0x00, 0x00, 0x00, 0x04])) - }); - - it('reads packet that spans multiple chunks', async () => { - const stream = new PgPacketStream() - stream.write(Buffer.from([0x0d, 0x00, 0x00, 0x00])) - stream.write(Buffer.from([0x09])) // length - stream.write(Buffer.from([0x0a, 0x0b, 0x0c, 0x0d])) - stream.write(Buffer.from([0x0a, 0x0b, 0x0c, 0x0d])) - stream.write(Buffer.from([0x0a, 0x0b, 0x0c, 0x0d])) - stream.end() - const buffers = await consume(stream, 1) - expect(buffers).to.have.length(1) - }) -}); diff --git a/packages/pg-packet-stream/src/index.ts b/packages/pg-packet-stream/src/index.ts index dc2af4246..2bd2da69c 100644 --- a/packages/pg-packet-stream/src/index.ts +++ b/packages/pg-packet-stream/src/index.ts @@ -1,5 +1,5 @@ import { Transform, TransformCallback, TransformOptions } from 'stream'; -import { Mode, bindComplete, parseComplete, closeComplete, noData, portalSuspended, copyDone, replicationStart, emptyQuery, ReadyForQueryMessage, CommandCompleteMessage, CopyDataMessage, CopyResponse, NotificationResponseMessage, RowDescriptionMessage, Field, DataRowMessage, ParameterStatusMessage, BackendKeyDataMessage, DatabaseError, BackendMessage } from './messages'; +import { Mode, bindComplete, parseComplete, closeComplete, noData, portalSuspended, copyDone, replicationStart, emptyQuery, 
ReadyForQueryMessage, CommandCompleteMessage, CopyDataMessage, CopyResponse, NotificationResponseMessage, RowDescriptionMessage, Field, DataRowMessage, ParameterStatusMessage, BackendKeyDataMessage, DatabaseError, BackendMessage, MessageName, AuthenticationMD5Password } from './messages'; import { BufferReader } from './BufferReader'; import assert from 'assert' @@ -63,7 +63,12 @@ export class PgPacketStream extends Transform { } public _transform(buffer: Buffer, encoding: string, callback: TransformCallback) { - const combinedBuffer: Buffer = this.remainingBuffer.byteLength ? Buffer.concat([this.remainingBuffer, buffer], this.remainingBuffer.length + buffer.length) : buffer; + let combinedBuffer = buffer; + if (this.remainingBuffer.byteLength) { + combinedBuffer = Buffer.allocUnsafe(this.remainingBuffer.byteLength + buffer.byteLength); + this.remainingBuffer.copy(combinedBuffer) + buffer.copy(combinedBuffer, this.remainingBuffer.byteLength) + } let offset = 0; while ((offset + HEADER_LENGTH) <= combinedBuffer.byteLength) { // code is 1 byte long - it identifies the message type @@ -125,9 +130,9 @@ export class PgPacketStream extends Transform { case MessageCodes.BackendKeyData: return this.parseBackendKeyData(offset, length, bytes); case MessageCodes.ErrorMessage: - return this.parseErrorMessage(offset, length, bytes, 'error'); + return this.parseErrorMessage(offset, length, bytes, MessageName.error); case MessageCodes.NoticeMessage: - return this.parseErrorMessage(offset, length, bytes, 'notice'); + return this.parseErrorMessage(offset, length, bytes, MessageName.notice); case MessageCodes.RowDescriptionMessage: return this.parseRowDescriptionMessage(offset, length, bytes); case MessageCodes.CopyIn: @@ -142,7 +147,7 @@ export class PgPacketStream extends Transform { } public _flush(callback: TransformCallback) { - this._transform(Buffer.alloc(0), 'utf-i', callback) + this._transform(Buffer.alloc(0), 'utf-8', callback) } private parseReadyForQueryMessage(offset: 
number, length: number, bytes: Buffer) { @@ -163,14 +168,14 @@ export class PgPacketStream extends Transform { } private parseCopyInMessage(offset: number, length: number, bytes: Buffer) { - return this.parseCopyMessage(offset, length, bytes, 'copyInResponse') + return this.parseCopyMessage(offset, length, bytes, MessageName.copyInResponse) } private parseCopyOutMessage(offset: number, length: number, bytes: Buffer) { - return this.parseCopyMessage(offset, length, bytes, 'copyOutResponse') + return this.parseCopyMessage(offset, length, bytes, MessageName.copyOutResponse) } - private parseCopyMessage(offset: number, length: number, bytes: Buffer, messageName: string) { + private parseCopyMessage(offset: number, length: number, bytes: Buffer, messageName: MessageName) { this.reader.setBuffer(offset, bytes); const isBinary = this.reader.byte() !== 0; const columnCount = this.reader.int16() @@ -244,8 +249,8 @@ export class PgPacketStream extends Transform { this.reader.setBuffer(offset, bytes); const code = this.reader.int32() // TODO(bmc): maybe better types here - const message: any = { - name: 'authenticationOk', + const message: BackendMessage & any = { + name: MessageName.authenticationOk, length, }; @@ -254,17 +259,18 @@ export class PgPacketStream extends Transform { break; case 3: // AuthenticationCleartextPassword if (message.length === 8) { - message.name = 'authenticationCleartextPassword' + message.name = MessageName.authenticationCleartextPassword } break case 5: // AuthenticationMD5Password if (message.length === 12) { - message.name = 'authenticationMD5Password' - message.salt = this.reader.bytes(4); + message.name = MessageName.authenticationMD5Password + const salt = this.reader.bytes(4); + return new AuthenticationMD5Password(length, salt); } break case 10: // AuthenticationSASL - message.name = 'authenticationSASL' + message.name = MessageName.authenticationSASL message.mechanisms = [] let mechanism: string; do { @@ -276,11 +282,11 @@ export class 
PgPacketStream extends Transform { } while (mechanism) break; case 11: // AuthenticationSASLContinue - message.name = 'authenticationSASLContinue' + message.name = MessageName.authenticationSASLContinue message.data = this.reader.string(length - 4) break; case 12: // AuthenticationSASLFinal - message.name = 'authenticationSASLFinal' + message.name = MessageName.authenticationSASLFinal message.data = this.reader.string(length - 4) break; default: @@ -289,7 +295,7 @@ export class PgPacketStream extends Transform { return message; } - private parseErrorMessage(offset: number, length: number, bytes: Buffer, name: string) { + private parseErrorMessage(offset: number, length: number, bytes: Buffer, name: MessageName) { this.reader.setBuffer(offset, bytes); var fields: Record = {} var fieldType = this.reader.string(1) diff --git a/packages/pg-packet-stream/src/messages.ts b/packages/pg-packet-stream/src/messages.ts index 26013cf13..160eb3ffb 100644 --- a/packages/pg-packet-stream/src/messages.ts +++ b/packages/pg-packet-stream/src/messages.ts @@ -1,47 +1,76 @@ export type Mode = 'text' | 'binary'; -export type BackendMessage = { - name: string; +export const enum MessageName { + parseComplete = 'parseComplete', + bindComplete = 'bindComplete', + closeComplete = 'closeComplete', + noData = 'noData', + portalSuspended = 'portalSuspended', + replicationStart = 'replicationStart', + emptyQuery = 'emptyQuery', + copyDone = 'copyDone', + copyData = 'copyData', + rowDescription = 'rowDescription', + parameterStatus = 'parameterStatus', + backendKeyData = 'backendKeyData', + notification = 'notification', + readyForQuery = 'readyForQuery', + commandComplete = 'commandComplete', + dataRow = 'dataRow', + copyInResponse = 'copyInResponse', + copyOutResponse = 'copyOutResponse', + authenticationOk = 'authenticationOk', + authenticationMD5Password = 'authenticationMD5Password', + authenticationCleartextPassword = 'authenticationCleartextPassword', + authenticationSASL = 
'authenticationSASL', + authenticationSASLContinue = 'authenticationSASLContinue', + authenticationSASLFinal = 'authenticationSASLFinal', + error = 'error', + notice = 'notice', +} + +export interface BackendMessage { + name: MessageName; length: number; } export const parseComplete: BackendMessage = { - name: 'parseComplete', + name: MessageName.parseComplete, length: 5, }; export const bindComplete: BackendMessage = { - name: 'bindComplete', + name: MessageName.bindComplete, length: 5, } export const closeComplete: BackendMessage = { - name: 'closeComplete', + name: MessageName.closeComplete, length: 5, } export const noData: BackendMessage = { - name: 'noData', + name: MessageName.noData, length: 5 } export const portalSuspended: BackendMessage = { - name: 'portalSuspended', + name: MessageName.portalSuspended, length: 5, } export const replicationStart: BackendMessage = { - name: 'replicationStart', + name: MessageName.replicationStart, length: 4, } export const emptyQuery: BackendMessage = { - name: 'emptyQuery', + name: MessageName.emptyQuery, length: 4, } export const copyDone: BackendMessage = { - name: 'copyDone', + name: MessageName.copyDone, length: 4, } @@ -62,13 +91,13 @@ export class DatabaseError extends Error { public file: string | undefined; public line: string | undefined; public routine: string | undefined; - constructor(message: string, public readonly length: number, public readonly name: string) { + constructor(message: string, public readonly length: number, public readonly name: MessageName) { super(message) } } export class CopyDataMessage { - public readonly name = 'copyData'; + public readonly name = MessageName.copyData; constructor(public readonly length: number, public readonly chunk: Buffer) { } @@ -76,7 +105,7 @@ export class CopyDataMessage { export class CopyResponse { public readonly columnTypes: number[]; - constructor(public readonly length: number, public readonly name: string, public readonly binary: boolean, columnCount: 
number) { + constructor(public readonly length: number, public readonly name: MessageName, public readonly binary: boolean, columnCount: number) { this.columnTypes = new Array(columnCount); } } @@ -87,7 +116,7 @@ export class Field { } export class RowDescriptionMessage { - public readonly name: string = 'rowDescription'; + public readonly name: MessageName = MessageName.rowDescription; public readonly fields: Field[]; constructor(public readonly length: number, public readonly fieldCount: number) { this.fields = new Array(this.fieldCount) @@ -95,39 +124,45 @@ export class RowDescriptionMessage { } export class ParameterStatusMessage { - public readonly name: string = 'parameterStatus'; + public readonly name: MessageName = MessageName.parameterStatus; constructor(public readonly length: number, public readonly parameterName: string, public readonly parameterValue: string) { } } +export class AuthenticationMD5Password implements BackendMessage { + public readonly name: MessageName = MessageName.authenticationMD5Password; + constructor(public readonly length: number, public readonly salt: Buffer) { + } +} + export class BackendKeyDataMessage { - public readonly name: string = 'backendKeyData'; + public readonly name: MessageName = MessageName.backendKeyData; constructor(public readonly length: number, public readonly processID: number, public readonly secretKey: number) { } } export class NotificationResponseMessage { - public readonly name: string = 'notification'; + public readonly name: MessageName = MessageName.notification; constructor(public readonly length: number, public readonly processId: number, public readonly channel: string, public readonly payload: string) { } } export class ReadyForQueryMessage { - public readonly name: string = 'readyForQuery'; + public readonly name: MessageName = MessageName.readyForQuery; constructor(public readonly length: number, public readonly status: string) { } } export class CommandCompleteMessage { - public readonly name: 
string = 'commandComplete' + public readonly name: MessageName = MessageName.commandComplete constructor(public readonly length: number, public readonly text: string) { } } export class DataRowMessage { public readonly fieldCount: number; - public readonly name: string = 'dataRow' + public readonly name: MessageName = MessageName.dataRow constructor(public length: number, public fields: any[]) { this.fieldCount = fields.length; } diff --git a/packages/pg-packet-stream/tsconfig.json b/packages/pg-packet-stream/tsconfig.json index f6661febd..bdbe07a39 100644 --- a/packages/pg-packet-stream/tsconfig.json +++ b/packages/pg-packet-stream/tsconfig.json @@ -10,6 +10,7 @@ "sourceMap": true, "outDir": "dist", "baseUrl": ".", + "declaration": true, "paths": { "*": [ "node_modules/*", From 89b451e934d307595c039bf08adb384a8720df7d Mon Sep 17 00:00:00 2001 From: "Brian M. Carlson" Date: Fri, 27 Dec 2019 03:15:51 +0000 Subject: [PATCH 09/10] Properly merge dockerfile --- .devcontainer/Dockerfile | 7 ------- 1 file changed, 7 deletions(-) diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index ae2d0397f..d60b0cc49 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -32,11 +32,7 @@ RUN apt-get update \ && curl -sS https://dl.yarnpkg.com/$(lsb_release -is | tr '[:upper:]' '[:lower:]')/pubkey.gpg | apt-key add - 2>/dev/null \ && echo "deb https://dl.yarnpkg.com/$(lsb_release -is | tr '[:upper:]' '[:lower:]')/ stable main" | tee /etc/apt/sources.list.d/yarn.list \ && apt-get update \ -<<<<<<< HEAD && apt-get -y install --no-install-recommends yarn tmux locales postgresql \ -======= - && apt-get -y install --no-install-recommends yarn tmux locales \ ->>>>>>> origin/master # # Install eslint globally && npm install -g eslint \ @@ -62,14 +58,11 @@ RUN curl https://raw.githubusercontent.com/brianc/dotfiles/master/.tmux.conf > ~ # install nvm RUN curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.35.2/install.sh | bash -<<<<<<< HEAD -======= # set 
up a nicer prompt RUN git clone https://github.com/magicmonty/bash-git-prompt.git ~/.bash-git-prompt --depth=1 RUN echo "source $HOME/.bash-git-prompt/gitprompt.sh" >> ~/.bashrc ->>>>>>> origin/master # Set the locale RUN sed -i -e 's/# en_US.UTF-8 UTF-8/en_US.UTF-8 UTF-8/' /etc/locale.gen && locale-gen ENV LANG en_US.UTF-8 From 6168f2ee0dc52f866250dca613170ce2f6747b49 Mon Sep 17 00:00:00 2001 From: "Brian M. Carlson" Date: Fri, 27 Dec 2019 03:22:30 +0000 Subject: [PATCH 10/10] Disable lint on missing module since the file is not included --- packages/pg/lib/connection-fast.js | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/pg/lib/connection-fast.js b/packages/pg/lib/connection-fast.js index 29752df72..38f55bdcd 100644 --- a/packages/pg/lib/connection-fast.js +++ b/packages/pg/lib/connection-fast.js @@ -12,6 +12,7 @@ var EventEmitter = require('events').EventEmitter var util = require('util') var Writer = require('buffer-writer') +// eslint-disable-next-line var PacketStream = require('pg-packet-stream') var TEXT_MODE = 0