init
Some checks failed
Actions / runner-job (push) Failing after 27s

This commit is contained in:
2025-07-28 01:44:37 +01:00
commit c762544c70
146 changed files with 13370 additions and 0 deletions

154
node_modules/.package-lock.json generated vendored Normal file
View File

@ -0,0 +1,154 @@
{
"name": "test-pg",
"version": "1.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"node_modules/pg": {
"version": "8.16.3",
"resolved": "https://registry.npmjs.org/pg/-/pg-8.16.3.tgz",
"integrity": "sha512-enxc1h0jA/aq5oSDMvqyW3q89ra6XIIDZgCX9vkMrnz5DFTw/Ny3Li2lFQ+pt3L6MCgm/5o2o8HW9hiJji+xvw==",
"license": "MIT",
"dependencies": {
"pg-connection-string": "^2.9.1",
"pg-pool": "^3.10.1",
"pg-protocol": "^1.10.3",
"pg-types": "2.2.0",
"pgpass": "1.0.5"
},
"engines": {
"node": ">= 16.0.0"
},
"optionalDependencies": {
"pg-cloudflare": "^1.2.7"
},
"peerDependencies": {
"pg-native": ">=3.0.1"
},
"peerDependenciesMeta": {
"pg-native": {
"optional": true
}
}
},
"node_modules/pg-cloudflare": {
"version": "1.2.7",
"resolved": "https://registry.npmjs.org/pg-cloudflare/-/pg-cloudflare-1.2.7.tgz",
"integrity": "sha512-YgCtzMH0ptvZJslLM1ffsY4EuGaU0cx4XSdXLRFae8bPP4dS5xL1tNB3k2o/N64cHJpwU7dxKli/nZ2lUa5fLg==",
"license": "MIT",
"optional": true
},
"node_modules/pg-connection-string": {
"version": "2.9.1",
"resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.9.1.tgz",
"integrity": "sha512-nkc6NpDcvPVpZXxrreI/FOtX3XemeLl8E0qFr6F2Lrm/I8WOnaWNhIPK2Z7OHpw7gh5XJThi6j6ppgNoaT1w4w==",
"license": "MIT"
},
"node_modules/pg-int8": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz",
"integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==",
"license": "ISC",
"engines": {
"node": ">=4.0.0"
}
},
"node_modules/pg-pool": {
"version": "3.10.1",
"resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.10.1.tgz",
"integrity": "sha512-Tu8jMlcX+9d8+QVzKIvM/uJtp07PKr82IUOYEphaWcoBhIYkoHpLXN3qO59nAI11ripznDsEzEv8nUxBVWajGg==",
"license": "MIT",
"peerDependencies": {
"pg": ">=8.0"
}
},
"node_modules/pg-protocol": {
"version": "1.10.3",
"resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.10.3.tgz",
"integrity": "sha512-6DIBgBQaTKDJyxnXaLiLR8wBpQQcGWuAESkRBX/t6OwA8YsqP+iVSiond2EDy6Y/dsGk8rh/jtax3js5NeV7JQ==",
"license": "MIT"
},
"node_modules/pg-types": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz",
"integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==",
"license": "MIT",
"dependencies": {
"pg-int8": "1.0.1",
"postgres-array": "~2.0.0",
"postgres-bytea": "~1.0.0",
"postgres-date": "~1.0.4",
"postgres-interval": "^1.1.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/pgpass": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz",
"integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==",
"license": "MIT",
"dependencies": {
"split2": "^4.1.0"
}
},
"node_modules/postgres-array": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz",
"integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==",
"license": "MIT",
"engines": {
"node": ">=4"
}
},
"node_modules/postgres-bytea": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz",
"integrity": "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==",
"license": "MIT",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/postgres-date": {
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz",
"integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==",
"license": "MIT",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/postgres-interval": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz",
"integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==",
"license": "MIT",
"dependencies": {
"xtend": "^4.0.0"
},
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/split2": {
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz",
"integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==",
"license": "ISC",
"engines": {
"node": ">= 10.x"
}
},
"node_modules/xtend": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz",
"integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==",
"license": "MIT",
"engines": {
"node": ">=0.4"
}
}
}
}

21
node_modules/pg-cloudflare/LICENSE generated vendored Normal file
View File

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2010 - 2021 Brian Carlson
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

112
node_modules/pg-cloudflare/README.md generated vendored Normal file
View File

@ -0,0 +1,112 @@
# pg-cloudflare
`pg-cloudflare` makes it easier to take an existing package that relies on `tls` and `net`, and make it work in environments where only `connect()` is supported, such as Cloudflare Workers.
`pg-cloudflare` wraps `connect()`, the [TCP Socket API](https://github.com/wintercg/proposal-sockets-api) proposed within WinterCG, and implemented in [Cloudflare Workers](https://developers.cloudflare.com/workers/runtime-apis/tcp-sockets/), and exposes an interface with methods similar to what the `net` and `tls` modules in Node.js expose. (ex: `net.connect(path[, options][, callback])`). This minimizes the number of changes needed in order to make an existing package work across JavaScript runtimes.
## Installation
```
npm i --save-dev pg-cloudflare
```
The package uses conditional exports to support bundlers that don't know about
`cloudflare:sockets`, so the consumer code by default imports an empty file. To
enable the package, resolve to the `cloudflare` condition in your bundler's
config. For example:
- `webpack.config.js`
```js
export default {
...,
resolve: { conditionNames: [..., "workerd"] },
plugins: [
// ignore cloudflare:sockets imports
new webpack.IgnorePlugin({
resourceRegExp: /^cloudflare:sockets$/,
}),
],
}
```
- `vite.config.js`
> [!NOTE]
> If you are using the [Cloudflare Vite plugin](https://www.npmjs.com/package/@cloudflare/vite-plugin) then the following configuration is not necessary.
```js
export default defineConfig({
...,
resolve: {
conditions: [..., "workerd"],
},
build: {
...,
// don't try to bundle cloudflare:sockets
rollupOptions: {
external: [..., 'cloudflare:sockets'],
},
},
})
```
- `rollup.config.js`
```js
export default defineConfig({
...,
plugins: [..., nodeResolve({ exportConditions: [..., 'workerd'] })],
// don't try to bundle cloudflare:sockets
external: [..., 'cloudflare:sockets'],
})
```
- `esbuild.config.js`
```js
await esbuild.build({
...,
conditions: [..., 'workerd'],
})
```
The concrete examples can be found in `packages/pg-bundler-test`.
## How to use conditionally, in non-Node.js environments
As implemented in `pg` [here](https://github.com/brianc/node-postgres/commit/07553428e9c0eacf761a5d4541a3300ff7859578#diff-34588ad868ebcb232660aba7ee6a99d1e02f4bc93f73497d2688c3f074e60533R5-R13), a typical use case might look as follows, where in a Node.js environment the `net` module is used, while in a non-Node.js environment, where `net` is unavailable, `pg-cloudflare` is used instead, providing an equivalent interface:
```js
module.exports.getStream = function getStream(ssl = false) {
const net = require('net')
if (typeof net.Socket === 'function') {
return net.Socket()
}
const { CloudflareSocket } = require('pg-cloudflare')
return new CloudflareSocket(ssl)
}
```
## Node.js implementation of the Socket API proposal
If you're looking for a way to rely on `connect()` as the interface you use to interact with raw sockets, but need this interface to be available in a Node.js environment, [`@arrowood.dev/socket`](https://github.com/Ethan-Arrowood/socket) provides a Node.js implementation of the Socket API.
### license
The MIT License (MIT)
Copyright (c) 2023 Brian M. Carlson
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

2
node_modules/pg-cloudflare/dist/empty.d.ts generated vendored Normal file
View File

@ -0,0 +1,2 @@
// Type declaration for the empty stub module served outside workerd
// environments (see the `exports` field in package.json): the default
// export is just an empty object.
declare const _default: {};
export default _default;

6
node_modules/pg-cloudflare/dist/empty.js generated vendored Normal file
View File

@ -0,0 +1,6 @@
"use strict";
// `__esModule` marks this CommonJS file as transpiled ESM so interop
// helpers unwrap `exports.default` correctly.
Object.defineProperty(exports, "__esModule", { value: true });
// This is an empty module that is served up when outside of a workerd environment
// See the `exports` field in package.json
exports.default = {};
//# sourceMappingURL=empty.js.map

1
node_modules/pg-cloudflare/dist/empty.js.map generated vendored Normal file
View File

@ -0,0 +1 @@
{"version":3,"file":"empty.js","sourceRoot":"","sources":["../src/empty.ts"],"names":[],"mappings":";;AAAA,kFAAkF;AAClF,0CAA0C;AAC1C,kBAAe,EAAE,CAAA"}

31
node_modules/pg-cloudflare/dist/index.d.ts generated vendored Normal file
View File

@ -0,0 +1,31 @@
/// <reference types="node" />
/// <reference types="node" />
/// <reference types="node" />
import { TlsOptions } from 'cloudflare:sockets';
import { EventEmitter } from 'events';
/**
 * Wrapper around the Cloudflare built-in socket that can be used by the `Connection`.
 */
export declare class CloudflareSocket extends EventEmitter {
    /** When true, connect() opens the socket in 'starttls' mode. */
    readonly ssl: boolean;
    /** Set to true once connect() resolves and the writer is ready. */
    writable: boolean;
    /** Set to true by destroy(); the socket is also ended at that point. */
    destroyed: boolean;
    private _upgrading;
    private _upgraded;
    private _cfSocket;
    private _cfWriter;
    private _cfReader;
    constructor(ssl: boolean);
    /** No-op kept for net.Socket API compatibility; returns `this` for chaining. */
    setNoDelay(): this;
    /** No-op kept for net.Socket API compatibility; returns `this` for chaining. */
    setKeepAlive(): this;
    /** No-op kept for net.Socket API compatibility; returns `this` for chaining. */
    ref(): this;
    /** No-op kept for net.Socket API compatibility; returns `this` for chaining. */
    unref(): this;
    /**
     * Opens the Cloudflare socket to `host:port`. Emits 'connect' when ready
     * and 'error' on failure (errors are emitted, not thrown/rejected).
     */
    connect(port: number, host: string, connectListener?: (...args: unknown[]) => void): Promise<this | undefined>;
    /** Internal read loop: forwards every received chunk as a 'data' event. */
    _listen(): Promise<void>;
    /** Internal: reads exactly one chunk (used for the TLS/startup handshake). */
    _listenOnce(): Promise<void>;
    /**
     * Writes `data` to the socket; strings are converted using `encoding`.
     * The callback receives the write error, if any.
     */
    write(data: Uint8Array | string, encoding?: BufferEncoding, callback?: (...args: unknown[]) => void): true | void;
    /** Flushes optional final `data`, then closes the underlying socket. */
    end(data?: Buffer, encoding?: BufferEncoding, callback?: (...args: unknown[]) => void): this;
    /** Marks the socket destroyed and ends it; `reason` is only logged. */
    destroy(reason: string): this;
    /** Upgrades a 'starttls' socket to TLS. Emits 'error' if called twice. */
    startTls(options: TlsOptions): void;
    /** Internal: wires the socket's `closed` promise to the 'close' event. */
    _addClosedHandler(): void;
}

152
node_modules/pg-cloudflare/dist/index.js generated vendored Normal file
View File

@ -0,0 +1,152 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CloudflareSocket = void 0;
const events_1 = require("events");
/**
 * Wrapper around the Cloudflare built-in socket that can be used by the `Connection`.
 *
 * Mimics the parts of the net.Socket/tls.TLSSocket surface that pg's
 * Connection uses: 'connect'/'data'/'close'/'error' events, write/end/destroy,
 * and a startTls() upgrade for sockets opened with ssl = true.
 */
class CloudflareSocket extends events_1.EventEmitter {
    /**
     * @param {boolean} ssl when true, connect() opens the socket with
     *   secureTransport 'starttls' and reads one handshake chunk before
     *   entering the normal read loop.
     */
    constructor(ssl) {
        super();
        this.ssl = ssl;
        this.writable = false;
        this.destroyed = false;
        this._upgrading = false;
        this._upgraded = false;
        this._cfSocket = null;
        this._cfWriter = null;
        this._cfReader = null;
    }
    // The four methods below are no-ops kept for net.Socket API
    // compatibility; Cloudflare sockets expose no equivalent knobs.
    setNoDelay() {
        return this;
    }
    setKeepAlive() {
        return this;
    }
    ref() {
        return this;
    }
    unref() {
        return this;
    }
    /**
     * Opens the Cloudflare socket to `host:port` and starts the read loop.
     * Emits 'connect' once the writer is ready. All failures are reported via
     * the 'error' event; the returned promise never rejects.
     */
    async connect(port, host, connectListener) {
        try {
            log('connecting');
            if (connectListener)
                this.once('connect', connectListener);
            const options = this.ssl ? { secureTransport: 'starttls' } : {};
            // Imported lazily so this module can load outside workerd.
            const mod = await import('cloudflare:sockets');
            const connect = mod.connect;
            this._cfSocket = connect(`${host}:${port}`, options);
            this._cfWriter = this._cfSocket.writable.getWriter();
            this._addClosedHandler();
            this._cfReader = this._cfSocket.readable.getReader();
            if (this.ssl) {
                // starttls mode: consume exactly one chunk (the server's
                // handshake response) before startTls() swaps the streams.
                this._listenOnce().catch((e) => this.emit('error', e));
            }
            else {
                this._listen().catch((e) => this.emit('error', e));
            }
            await this._cfWriter.ready;
            log('socket ready');
            this.writable = true;
            this.emit('connect');
            return this;
        }
        catch (e) {
            this.emit('error', e);
        }
    }
    /** Read loop: forwards each received chunk as a 'data' event until EOF. */
    async _listen() {
        // eslint-disable-next-line no-constant-condition
        while (true) {
            log('awaiting receive from CF socket');
            const { done, value } = await this._cfReader.read();
            log('CF socket received:', done, value);
            if (done) {
                log('done');
                break;
            }
            this.emit('data', Buffer.from(value));
        }
    }
    /** Reads a single chunk (the TLS handshake response) as a 'data' event. */
    async _listenOnce() {
        log('awaiting first receive from CF socket');
        const { done, value } = await this._cfReader.read();
        log('First CF socket received:', done, value);
        // Guard: if the stream is already closed, `done` is true and `value`
        // is undefined — Buffer.from(undefined) would throw a TypeError.
        if (!done && value !== undefined) {
            this.emit('data', Buffer.from(value));
        }
    }
    /**
     * Writes `data` (strings are converted with `encoding`). The callback is
     * invoked with the write error, if any. Returns true when a write was
     * queued (empty payloads short-circuit through the callback).
     */
    write(data, encoding = 'utf8', callback = () => { }) {
        if (data.length === 0)
            return callback();
        if (typeof data === 'string')
            data = Buffer.from(data, encoding);
        log('sending data direct:', data);
        this._cfWriter.write(data).then(() => {
            log('data sent');
            callback();
        }, (err) => {
            log('send error', err);
            callback(err);
        });
        return true;
    }
    /** Flushes optional final `data`, then closes the underlying socket. */
    end(data = Buffer.alloc(0), encoding = 'utf8', callback = () => { }) {
        log('ending CF socket');
        this.write(data, encoding, (err) => {
            this._cfSocket.close();
            if (callback)
                callback(err);
        });
        return this;
    }
    /** Marks the socket destroyed and ends it; `reason` is only logged. */
    destroy(reason) {
        log('destroying CF socket', reason);
        this.destroyed = true;
        return this.end();
    }
    /**
     * Upgrades a 'starttls' socket to TLS by swapping in the new socket and
     * stream handles. Emits 'error' (with a string payload, preserved for
     * compatibility) if called more than once.
     */
    startTls(options) {
        if (this._upgraded) {
            // Don't try to upgrade again.
            this.emit('error', 'Cannot call `startTls()` more than once on a socket');
            return;
        }
        this._cfWriter.releaseLock();
        this._cfReader.releaseLock();
        this._upgrading = true;
        this._cfSocket = this._cfSocket.startTls(options);
        this._cfWriter = this._cfSocket.writable.getWriter();
        this._cfReader = this._cfSocket.readable.getReader();
        this._addClosedHandler();
        this._listen().catch((e) => this.emit('error', e));
    }
    /**
     * Wires the socket's `closed` promise to the 'close' event. During a TLS
     * upgrade the old socket's closure is expected and only flips the
     * upgrade flags instead of emitting 'close'.
     */
    _addClosedHandler() {
        this._cfSocket.closed.then(() => {
            if (!this._upgrading) {
                log('CF socket closed');
                this._cfSocket = null;
                this.emit('close');
            }
            else {
                this._upgrading = false;
                this._upgraded = true;
            }
        }).catch((e) => this.emit('error', e));
    }
}
exports.CloudflareSocket = CloudflareSocket;
// Flip to true to trace raw socket traffic on the console.
const debug = false;
// Renders binary payloads as a readable string/hex pair for trace output;
// anything that is not a byte buffer is passed through untouched.
function dump(data) {
    const isBinary = data instanceof Uint8Array || data instanceof ArrayBuffer;
    if (!isBinary) {
        return data;
    }
    const hex = Buffer.from(data).toString('hex');
    const str = new TextDecoder().decode(data).replace(/\n/g, '\\n');
    return `\n>>> STR: "${str}"\n>>> HEX: ${hex}\n`;
}
// Debug-only logger; a no-op unless `debug` is enabled.
function log(...args) {
    if (debug) {
        console.log(...args.map(dump));
    }
}
//# sourceMappingURL=index.js.map

1
node_modules/pg-cloudflare/dist/index.js.map generated vendored Normal file
View File

@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;AACA,mCAAqC;AAErC;;GAEG;AACH,MAAa,gBAAiB,SAAQ,qBAAY;IAUhD,YAAqB,GAAY;QAC/B,KAAK,EAAE,CAAA;QADY,QAAG,GAAH,GAAG,CAAS;QATjC,aAAQ,GAAG,KAAK,CAAA;QAChB,cAAS,GAAG,KAAK,CAAA;QAET,eAAU,GAAG,KAAK,CAAA;QAClB,cAAS,GAAG,KAAK,CAAA;QACjB,cAAS,GAAkB,IAAI,CAAA;QAC/B,cAAS,GAAuC,IAAI,CAAA;QACpD,cAAS,GAAuC,IAAI,CAAA;IAI5D,CAAC;IAED,UAAU;QACR,OAAO,IAAI,CAAA;IACb,CAAC;IACD,YAAY;QACV,OAAO,IAAI,CAAA;IACb,CAAC;IACD,GAAG;QACD,OAAO,IAAI,CAAA;IACb,CAAC;IACD,KAAK;QACH,OAAO,IAAI,CAAA;IACb,CAAC;IAED,KAAK,CAAC,OAAO,CAAC,IAAY,EAAE,IAAY,EAAE,eAA8C;QACtF,IAAI;YACF,GAAG,CAAC,YAAY,CAAC,CAAA;YACjB,IAAI,eAAe;gBAAE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,eAAe,CAAC,CAAA;YAE1D,MAAM,OAAO,GAAkB,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,eAAe,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAA;YAC9E,MAAM,GAAG,GAAG,MAAM,MAAM,CAAC,oBAAoB,CAAC,CAAA;YAC9C,MAAM,OAAO,GAAG,GAAG,CAAC,OAAO,CAAA;YAC3B,IAAI,CAAC,SAAS,GAAG,OAAO,CAAC,GAAG,IAAI,IAAI,IAAI,EAAE,EAAE,OAAO,CAAC,CAAA;YACpD,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,SAAS,EAAE,CAAA;YACpD,IAAI,CAAC,iBAAiB,EAAE,CAAA;YAExB,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,SAAS,EAAE,CAAA;YACpD,IAAI,IAAI,CAAC,GAAG,EAAE;gBACZ,IAAI,CAAC,WAAW,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,CAAA;aACvD;iBAAM;gBACL,IAAI,CAAC,OAAO,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,CAAA;aACnD;YAED,MAAM,IAAI,CAAC,SAAU,CAAC,KAAK,CAAA;YAC3B,GAAG,CAAC,cAAc,CAAC,CAAA;YACnB,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAA;YACpB,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,CAAA;YAEpB,OAAO,IAAI,CAAA;SACZ;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,CAAA;SACtB;IACH,CAAC;IAED,KAAK,CAAC,OAAO;QACX,iDAAiD;QACjD,OAAO,IAAI,EAAE;YACX,GAAG,CAAC,iCAAiC,CAAC,CAAA;YACtC,MAAM,EAAE,IAAI,EAAE,KAAK,EAAE,GAAG,MAAM,IAAI,CAAC,SAAU,CAAC,IAAI,EAAE,CAAA;YACpD,GAAG,CAAC,qBAAqB,EAAE,IAAI,EAAE,KAAK,CAAC,CAAA;YACvC,IAAI,IAAI,EAAE;gBACR,GAAG,CAAC,MAAM,CAAC,CAAA;gBAC
X,MAAK;aACN;YACD,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAA;SACtC;IACH,CAAC;IAED,KAAK,CAAC,WAAW;QACf,GAAG,CAAC,uCAAuC,CAAC,CAAA;QAC5C,MAAM,EAAE,IAAI,EAAE,KAAK,EAAE,GAAG,MAAM,IAAI,CAAC,SAAU,CAAC,IAAI,EAAE,CAAA;QACpD,GAAG,CAAC,2BAA2B,EAAE,IAAI,EAAE,KAAK,CAAC,CAAA;QAC7C,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAA;IACvC,CAAC;IAED,KAAK,CACH,IAAyB,EACzB,WAA2B,MAAM,EACjC,WAAyC,GAAG,EAAE,GAAE,CAAC;QAEjD,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC;YAAE,OAAO,QAAQ,EAAE,CAAA;QACxC,IAAI,OAAO,IAAI,KAAK,QAAQ;YAAE,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAA;QAEhE,GAAG,CAAC,sBAAsB,EAAE,IAAI,CAAC,CAAA;QACjC,IAAI,CAAC,SAAU,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,CAC9B,GAAG,EAAE;YACH,GAAG,CAAC,WAAW,CAAC,CAAA;YAChB,QAAQ,EAAE,CAAA;QACZ,CAAC,EACD,CAAC,GAAG,EAAE,EAAE;YACN,GAAG,CAAC,YAAY,EAAE,GAAG,CAAC,CAAA;YACtB,QAAQ,CAAC,GAAG,CAAC,CAAA;QACf,CAAC,CACF,CAAA;QACD,OAAO,IAAI,CAAA;IACb,CAAC;IAED,GAAG,CAAC,IAAI,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,WAA2B,MAAM,EAAE,WAAyC,GAAG,EAAE,GAAE,CAAC;QAC9G,GAAG,CAAC,kBAAkB,CAAC,CAAA;QACvB,IAAI,CAAC,KAAK,CAAC,IAAI,EAAE,QAAQ,EAAE,CAAC,GAAG,EAAE,EAAE;YACjC,IAAI,CAAC,SAAU,CAAC,KAAK,EAAE,CAAA;YACvB,IAAI,QAAQ;gBAAE,QAAQ,CAAC,GAAG,CAAC,CAAA;QAC7B,CAAC,CAAC,CAAA;QACF,OAAO,IAAI,CAAA;IACb,CAAC;IAED,OAAO,CAAC,MAAc;QACpB,GAAG,CAAC,sBAAsB,EAAE,MAAM,CAAC,CAAA;QACnC,IAAI,CAAC,SAAS,GAAG,IAAI,CAAA;QACrB,OAAO,IAAI,CAAC,GAAG,EAAE,CAAA;IACnB,CAAC;IAED,QAAQ,CAAC,OAAmB;QAC1B,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,8BAA8B;YAC9B,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,qDAAqD,CAAC,CAAA;YACzE,OAAM;SACP;QACD,IAAI,CAAC,SAAU,CAAC,WAAW,EAAE,CAAA;QAC7B,IAAI,CAAC,SAAU,CAAC,WAAW,EAAE,CAAA;QAC7B,IAAI,CAAC,UAAU,GAAG,IAAI,CAAA;QACtB,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,SAAU,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAA;QAClD,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,SAAS,EAAE,CAAA;QACpD,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,SAAS,EAAE,CAAA;QACpD,IAAI,CAAC,iBAAiB,EAAE,CAAA;QACxB,IAAI,CAAC,OAAO,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,C
AAC,CAAC,CAAC,CAAA;IACpD,CAAC;IAED,iBAAiB;QACf,IAAI,CAAC,SAAU,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE;YAC/B,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE;gBACpB,GAAG,CAAC,kBAAkB,CAAC,CAAA;gBACvB,IAAI,CAAC,SAAS,GAAG,IAAI,CAAA;gBACrB,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;aACnB;iBAAM;gBACL,IAAI,CAAC,UAAU,GAAG,KAAK,CAAA;gBACvB,IAAI,CAAC,SAAS,GAAG,IAAI,CAAA;aACtB;QACH,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,CAAA;IACxC,CAAC;CACF;AA/ID,4CA+IC;AAED,MAAM,KAAK,GAAG,KAAK,CAAA;AAEnB,SAAS,IAAI,CAAC,IAAa;IACzB,IAAI,IAAI,YAAY,UAAU,IAAI,IAAI,YAAY,WAAW,EAAE;QAC7D,MAAM,GAAG,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAA;QAC7C,MAAM,GAAG,GAAG,IAAI,WAAW,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;QAC1C,OAAO,eAAe,GAAG,CAAC,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,eAAe,GAAG,IAAI,CAAA;KACtE;SAAM;QACL,OAAO,IAAI,CAAA;KACZ;AACH,CAAC;AAED,SAAS,GAAG,CAAC,GAAG,IAAe;IAC7B,KAAK,IAAI,OAAO,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAA;AACzC,CAAC"}

3
node_modules/pg-cloudflare/esm/index.mjs generated vendored Normal file
View File

@ -0,0 +1,3 @@
// ESM wrapper: re-exports CloudflareSocket from the CommonJS build so the
// package's "workerd" export condition can serve a native ESM entry point
// (see the `exports` field in package.json).
import cf from '../dist/index.js'
export const CloudflareSocket = cf.CloudflareSocket

38
node_modules/pg-cloudflare/package.json generated vendored Normal file
View File

@ -0,0 +1,38 @@
{
"name": "pg-cloudflare",
"version": "1.2.7",
"description": "A socket implementation that can run on Cloudflare Workers using native TCP connections.",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"license": "MIT",
"devDependencies": {
"ts-node": "^8.5.4",
"typescript": "^4.0.3"
},
"exports": {
".": {
"workerd": {
"import": "./esm/index.mjs",
"require": "./dist/index.js"
},
"default": "./dist/empty.js"
}
},
"scripts": {
"build": "tsc",
"build:watch": "tsc --watch",
"prepublish": "yarn build",
"test": "echo e2e test in pg package"
},
"repository": {
"type": "git",
"url": "git://github.com/brianc/node-postgres.git",
"directory": "packages/pg-cloudflare"
},
"files": [
"/dist/*{js,ts,map}",
"/src",
"/esm"
],
"gitHead": "8f8e7315e8f7c1bb01e98fdb41c8c92585510782"
}

3
node_modules/pg-cloudflare/src/empty.ts generated vendored Normal file
View File

@ -0,0 +1,3 @@
// This is an empty module that is served up when outside of a workerd environment
// See the `exports` field in package.json
// (Bundlers resolving the default export condition get this stub instead of
// code that imports `cloudflare:sockets`.)
export default {}

166
node_modules/pg-cloudflare/src/index.ts generated vendored Normal file
View File

@ -0,0 +1,166 @@
import { SocketOptions, Socket, TlsOptions } from 'cloudflare:sockets'
import { EventEmitter } from 'events'
/**
 * Wrapper around the Cloudflare built-in socket that can be used by the `Connection`.
 *
 * Mimics the parts of the net.Socket/tls.TLSSocket surface that pg's
 * Connection uses: 'connect'/'data'/'close'/'error' events, write/end/destroy,
 * and a startTls() upgrade for sockets created with ssl = true.
 */
export class CloudflareSocket extends EventEmitter {
  // True once connect() resolves and the writer is ready.
  writable = false
  // True after destroy() has been called.
  destroyed = false

  private _upgrading = false
  private _upgraded = false
  private _cfSocket: Socket | null = null
  private _cfWriter: WritableStreamDefaultWriter | null = null
  private _cfReader: ReadableStreamDefaultReader | null = null

  // `ssl` selects the 'starttls' secureTransport in connect().
  constructor(readonly ssl: boolean) {
    super()
  }

  // The four methods below are no-ops kept for net.Socket API compatibility;
  // Cloudflare sockets expose no equivalent knobs.
  setNoDelay() {
    return this
  }
  setKeepAlive() {
    return this
  }
  ref() {
    return this
  }
  unref() {
    return this
  }

  /**
   * Opens the Cloudflare socket to `host:port` and starts the read loop.
   * Emits 'connect' once the writer is ready. All failures are reported via
   * the 'error' event; the returned promise never rejects.
   */
  async connect(port: number, host: string, connectListener?: (...args: unknown[]) => void) {
    try {
      log('connecting')
      if (connectListener) this.once('connect', connectListener)

      const options: SocketOptions = this.ssl ? { secureTransport: 'starttls' } : {}
      // Imported lazily so this module can load outside workerd.
      const mod = await import('cloudflare:sockets')
      const connect = mod.connect
      this._cfSocket = connect(`${host}:${port}`, options)
      this._cfWriter = this._cfSocket.writable.getWriter()
      this._addClosedHandler()

      this._cfReader = this._cfSocket.readable.getReader()
      if (this.ssl) {
        // starttls mode: consume exactly one chunk (the server's handshake
        // response) before startTls() swaps the streams.
        this._listenOnce().catch((e) => this.emit('error', e))
      } else {
        this._listen().catch((e) => this.emit('error', e))
      }

      await this._cfWriter!.ready
      log('socket ready')
      this.writable = true
      this.emit('connect')

      return this
    } catch (e) {
      this.emit('error', e)
    }
  }

  /** Read loop: forwards each received chunk as a 'data' event until EOF. */
  async _listen() {
    // eslint-disable-next-line no-constant-condition
    while (true) {
      log('awaiting receive from CF socket')
      const { done, value } = await this._cfReader!.read()
      log('CF socket received:', done, value)
      if (done) {
        log('done')
        break
      }
      this.emit('data', Buffer.from(value))
    }
  }

  /** Reads a single chunk (the TLS handshake response) as a 'data' event. */
  async _listenOnce() {
    log('awaiting first receive from CF socket')
    const { done, value } = await this._cfReader!.read()
    log('First CF socket received:', done, value)
    // Guard: if the stream is already closed, `done` is true and `value` is
    // undefined — Buffer.from(undefined) would throw a TypeError.
    if (!done && value !== undefined) {
      this.emit('data', Buffer.from(value))
    }
  }

  /**
   * Writes `data` (strings are converted with `encoding`). The callback is
   * invoked with the write error, if any. Returns true when a write was
   * queued (empty payloads short-circuit through the callback).
   */
  write(
    data: Uint8Array | string,
    encoding: BufferEncoding = 'utf8',
    callback: (...args: unknown[]) => void = () => {}
  ) {
    if (data.length === 0) return callback()
    if (typeof data === 'string') data = Buffer.from(data, encoding)

    log('sending data direct:', data)
    this._cfWriter!.write(data).then(
      () => {
        log('data sent')
        callback()
      },
      (err) => {
        log('send error', err)
        callback(err)
      }
    )
    return true
  }

  /** Flushes optional final `data`, then closes the underlying socket. */
  end(data = Buffer.alloc(0), encoding: BufferEncoding = 'utf8', callback: (...args: unknown[]) => void = () => {}) {
    log('ending CF socket')
    this.write(data, encoding, (err) => {
      this._cfSocket!.close()
      if (callback) callback(err)
    })
    return this
  }

  /** Marks the socket destroyed and ends it; `reason` is only logged. */
  destroy(reason: string) {
    log('destroying CF socket', reason)
    this.destroyed = true
    return this.end()
  }

  /**
   * Upgrades a 'starttls' socket to TLS by swapping in the new socket and
   * stream handles. Emits 'error' (with a string payload, preserved for
   * compatibility) if called more than once.
   */
  startTls(options: TlsOptions) {
    if (this._upgraded) {
      // Don't try to upgrade again.
      this.emit('error', 'Cannot call `startTls()` more than once on a socket')
      return
    }
    this._cfWriter!.releaseLock()
    this._cfReader!.releaseLock()
    this._upgrading = true
    this._cfSocket = this._cfSocket!.startTls(options)
    this._cfWriter = this._cfSocket.writable.getWriter()
    this._cfReader = this._cfSocket.readable.getReader()
    this._addClosedHandler()
    this._listen().catch((e) => this.emit('error', e))
  }

  /**
   * Wires the socket's `closed` promise to the 'close' event. During a TLS
   * upgrade the old socket's closure is expected and only flips the upgrade
   * flags instead of emitting 'close'.
   */
  _addClosedHandler() {
    this._cfSocket!.closed.then(() => {
      if (!this._upgrading) {
        log('CF socket closed')
        this._cfSocket = null
        this.emit('close')
      } else {
        this._upgrading = false
        this._upgraded = true
      }
    }).catch((e) => this.emit('error', e))
  }
}
// Flip to true to trace raw socket traffic on the console.
const debug = false

// Renders binary payloads as a readable string/hex pair for trace output;
// anything that is not a byte buffer is passed through untouched.
function dump(data: unknown) {
  const isBinary = data instanceof Uint8Array || data instanceof ArrayBuffer
  if (!isBinary) {
    return data
  }
  const hex = Buffer.from(data).toString('hex')
  const str = new TextDecoder().decode(data).replace(/\n/g, '\\n')
  return `\n>>> STR: "${str}"\n>>> HEX: ${hex}\n`
}

// Debug-only logger; a no-op unless `debug` is enabled.
function log(...args: unknown[]) {
  if (debug) {
    console.log(...args.map(dump))
  }
}

25
node_modules/pg-cloudflare/src/types.d.ts generated vendored Normal file
View File

@ -0,0 +1,25 @@
// Minimal ambient typings for the Cloudflare Workers TCP Socket API
// (`cloudflare:sockets`), covering only what this package uses.
declare module 'cloudflare:sockets' {
  export class Socket {
    // WHATWG streams; typed `any` here to avoid pulling in DOM/worker libs.
    public readonly readable: any
    public readonly writable: any
    // Resolves when the socket is fully closed.
    public readonly closed: Promise<void>
    public close(): Promise<void>
    // Upgrades a socket opened with secureTransport 'starttls' to TLS.
    public startTls(options: TlsOptions): Socket
  }
  export type TlsOptions = {
    expectedServerHostname?: string
  }
  export type SocketAddress = {
    hostname: string
    port: number
  }
  export type SocketOptions = {
    secureTransport?: 'off' | 'on' | 'starttls'
    allowHalfOpen?: boolean
  }
  export function connect(address: string | SocketAddress, options?: SocketOptions): Socket
}

21
node_modules/pg-connection-string/LICENSE generated vendored Normal file
View File

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2014 Iced Development
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

105
node_modules/pg-connection-string/README.md generated vendored Normal file
View File

@ -0,0 +1,105 @@
pg-connection-string
====================
[![NPM](https://nodei.co/npm/pg-connection-string.png?compact=true)](https://nodei.co/npm/pg-connection-string/)
Functions for dealing with a PostgreSQL connection string
`parse` method taken from [node-postgres](https://github.com/brianc/node-postgres.git)
Copyright (c) 2010-2014 Brian Carlson (brian.m.carlson@gmail.com)
MIT License
## Usage
```js
const parse = require('pg-connection-string').parse;
const config = parse('postgres://someuser:somepassword@somehost:381/somedatabase')
```
The resulting config contains a subset of the following properties:
* `user` - User with which to authenticate to the server
* `password` - Corresponding password
* `host` - Postgres server hostname or, for UNIX domain sockets, the socket filename
* `port` - port on which to connect
* `database` - Database name within the server
* `client_encoding` - string encoding the client will use
* `ssl`, either a boolean or an object with properties
* `rejectUnauthorized`
* `cert`
* `key`
* `ca`
* any other query parameters (for example, `application_name`) are preserved intact.
### ClientConfig Compatibility for TypeScript
The pg-connection-string `ConnectionOptions` interface is not compatible with the `ClientConfig` interface that [pg.Client](https://node-postgres.com/apis/client) expects. To remedy this, use the `parseIntoClientConfig` function instead of `parse`:
```ts
import { ClientConfig } from 'pg';
import { parseIntoClientConfig } from 'pg-connection-string';
const config: ClientConfig = parseIntoClientConfig('postgres://someuser:somepassword@somehost:381/somedatabase')
```
You can also use `toClientConfig` to convert an existing `ConnectionOptions` interface into a `ClientConfig` interface:
```ts
import { ClientConfig } from 'pg';
import { parse, toClientConfig } from 'pg-connection-string';
const config = parse('postgres://someuser:somepassword@somehost:381/somedatabase')
const clientConfig: ClientConfig = toClientConfig(config)
```
## Connection Strings
The short summary of acceptable URLs is:
* `socket:<path>?<query>` - UNIX domain socket
* `postgres://<user>:<password>@<host>:<port>/<database>?<query>` - TCP connection
But see below for more details.
### UNIX Domain Sockets
When user and password are not given, the socket path follows `socket:`, as in `socket:/var/run/pgsql`.
This form can be shortened to just a path: `/var/run/pgsql`.
When user and password are given, they are included in the typical URL positions, with an empty `host`, as in `socket://user:pass@/var/run/pgsql`.
Query parameters follow a `?` character, including the following special query parameters:
* `db=<database>` - sets the database name (urlencoded)
* `encoding=<encoding>` - sets the `client_encoding` property
### TCP Connections
TCP connections to the Postgres server are indicated with `pg:` or `postgres:` schemes (in fact, any scheme but `socket:` is accepted).
If username and password are included, they should be urlencoded.
The database name, however, should *not* be urlencoded.
Query parameters follow a `?` character, including the following special query parameters:
* `host=<host>` - sets `host` property, overriding the URL's host
* `encoding=<encoding>` - sets the `client_encoding` property
* `ssl=1`, `ssl=true`, `ssl=0`, `ssl=false` - sets `ssl` to true or false, accordingly
* `uselibpqcompat=true` - use libpq semantics
* `sslmode=<sslmode>` when `uselibpqcompat=true` is not set
* `sslmode=disable` - sets `ssl` to false
* `sslmode=no-verify` - sets `ssl` to `{ rejectUnauthorized: false }`
* `sslmode=prefer`, `sslmode=require`, `sslmode=verify-ca`, `sslmode=verify-full` - sets `ssl` to true
* `sslmode=<sslmode>` when `uselibpqcompat=true`
* `sslmode=disable` - sets `ssl` to false
* `sslmode=prefer` - sets `ssl` to `{ rejectUnauthorized: false }`
* `sslmode=require` - sets `ssl` to `{ rejectUnauthorized: false }` unless `sslrootcert` is specified, in which case it behaves like `verify-ca`
* `sslmode=verify-ca` - sets `ssl` to `{ checkServerIdentity: no-op }` (verify CA, but not server identity). This verifies the presented certificate against the effective CA specified in sslrootcert.
* `sslmode=verify-full` - sets `ssl` to `{}` (verify CA and server identity)
* `sslcert=<filename>` - reads data from the given file and includes the result as `ssl.cert`
* `sslkey=<filename>` - reads data from the given file and includes the result as `ssl.key`
* `sslrootcert=<filename>` - reads data from the given file and includes the result as `ssl.ca`
A bare relative URL, such as `salesdata`, will indicate a database name while leaving other properties empty.
> [!CAUTION]
> Choosing an sslmode other than verify-full has serious security implications. Please read https://www.postgresql.org/docs/current/libpq-ssl.html#LIBPQ-SSL-SSLMODE-STATEMENTS to understand the trade-offs.

8
node_modules/pg-connection-string/esm/index.mjs generated vendored Normal file
View File

@ -0,0 +1,8 @@
// ESM wrapper for pg-connection-string
// Bridges the CommonJS implementation in ../index.js to ES module consumers.
import connectionString from '../index.js'

// Re-export the parse function
// (the CJS module's export *is* the parse function, with helpers attached to it).
export default connectionString.parse
export const parse = connectionString.parse
export const toClientConfig = connectionString.toClientConfig
export const parseIntoClientConfig = connectionString.parseIntoClientConfig

36
node_modules/pg-connection-string/index.d.ts generated vendored Normal file
View File

@ -0,0 +1,36 @@
import { ClientConfig } from 'pg'

/** Parse a PostgreSQL connection string/URL into a {@link ConnectionOptions} object. */
export function parse(connectionString: string, options?: Options): ConnectionOptions

export interface Options {
  // Use libpq semantics when interpreting the connection string
  useLibpqCompat?: boolean
}

/** SSL subset of the parsed config, populated from sslcert/sslkey/sslrootcert/sslmode. */
interface SSLConfig {
  ca?: string
  cert?: string | null
  key?: string
  rejectUnauthorized?: boolean
}

export interface ConnectionOptions {
  host: string | null
  password?: string
  user?: string
  // Note: still a string here; `toClientConfig` converts it to a number.
  port?: string | null
  database: string | null | undefined
  client_encoding?: string
  ssl?: boolean | string | SSLConfig
  application_name?: string
  fallback_application_name?: string
  options?: string
  keepalives?: number
  // We allow any other options to be passed through
  [key: string]: unknown
}

/** Convert parsed ConnectionOptions into a `pg` ClientConfig (drops null/undefined, numeric port). */
export function toClientConfig(config: ConnectionOptions): ClientConfig

/** Parse a connection string directly into a `pg` ClientConfig. */
export function parseIntoClientConfig(connectionString: string): ClientConfig

213
node_modules/pg-connection-string/index.js generated vendored Normal file
View File

@ -0,0 +1,213 @@
'use strict'
//Parse method copied from https://github.com/brianc/node-postgres
//Copyright (c) 2010-2014 Brian Carlson (brian.m.carlson@gmail.com)
//MIT License
//parses a connection string
// Parse a PostgreSQL connection string into a plain config object.
//
// Accepts:
//   * a bare unix-socket path ("/var/run/pg mydb")
//   * `socket:` URLs (socket:/path?db=mydb&encoding=utf8)
//   * `postgres://` style URLs, including query-string options such as
//     host, port, ssl, sslmode, sslcert, sslkey, sslrootcert, uselibpqcompat
//
// `options.useLibpqCompat` (or `uselibpqcompat=true` in the query string)
// switches sslmode handling to libpq semantics.
function parse(str, options = {}) {
  // unix socket shorthand: "/path/to/socket database"
  if (str.charAt(0) === '/') {
    const config = str.split(' ')
    return { host: config[0], database: config[1] }
  }

  // Check for empty host in URL
  const config = {}
  let result
  let dummyHost = false
  if (/ |%[^a-f0-9]|%[a-f0-9][^a-f0-9]/i.test(str)) {
    // Ensure spaces are encoded as %20
    str = encodeURI(str).replace(/%25(\d\d)/g, '%$1')
  }

  try {
    try {
      result = new URL(str, 'postgres://base')
    } catch (e) {
      // The URL is invalid so try again with a dummy host
      result = new URL(str.replace('@/', '@___DUMMY___/'), 'postgres://base')
      dummyHost = true
    }
  } catch (err) {
    // Remove the input from the error message to avoid leaking sensitive information
    err.input && (err.input = '*****REDACTED*****')
    // BUG FIX: re-throw the (sanitized) error. Previously it was swallowed
    // here, leaving `result` undefined and producing an unrelated TypeError
    // on the searchParams access below instead of the real parse failure.
    throw err
  }

  // We'd like to use Object.fromEntries() here but Node.js 10 does not support it
  for (const entry of result.searchParams.entries()) {
    config[entry[0]] = entry[1]
  }

  // Query params win; otherwise fall back to the URL's credentials.
  config.user = config.user || decodeURIComponent(result.username)
  config.password = config.password || decodeURIComponent(result.password)

  if (result.protocol === 'socket:') {
    config.host = decodeURI(result.pathname)
    config.database = result.searchParams.get('db')
    config.client_encoding = result.searchParams.get('encoding')
    return config
  }
  const hostname = dummyHost ? '' : result.hostname
  if (!config.host) {
    // Only set the host if there is no equivalent query param.
    config.host = decodeURIComponent(hostname)
  } else if (hostname && /^%2f/i.test(hostname)) {
    // Only prepend the hostname to the pathname if it is not a URL encoded Unix socket host.
    result.pathname = hostname + result.pathname
  }
  if (!config.port) {
    // Only set the port if there is no equivalent query param.
    config.port = result.port
  }

  // The database name is the pathname without its leading slash (not urldecoded
  // beyond what decodeURI does — see the package README).
  const pathname = result.pathname.slice(1) || null
  config.database = pathname ? decodeURI(pathname) : null

  if (config.ssl === 'true' || config.ssl === '1') {
    config.ssl = true
  }

  if (config.ssl === '0') {
    config.ssl = false
  }

  // Any ssl* file/mode option upgrades `ssl` to an options object.
  if (config.sslcert || config.sslkey || config.sslrootcert || config.sslmode) {
    config.ssl = {}
  }

  // Only try to load fs if we expect to read from the disk
  const fs = config.sslcert || config.sslkey || config.sslrootcert ? require('fs') : null

  if (config.sslcert) {
    config.ssl.cert = fs.readFileSync(config.sslcert).toString()
  }

  if (config.sslkey) {
    config.ssl.key = fs.readFileSync(config.sslkey).toString()
  }

  if (config.sslrootcert) {
    config.ssl.ca = fs.readFileSync(config.sslrootcert).toString()
  }

  if (options.useLibpqCompat && config.uselibpqcompat) {
    throw new Error('Both useLibpqCompat and uselibpqcompat are set. Please use only one of them.')
  }

  if (config.uselibpqcompat === 'true' || options.useLibpqCompat) {
    // libpq semantics for sslmode
    switch (config.sslmode) {
      case 'disable': {
        config.ssl = false
        break
      }
      case 'prefer': {
        config.ssl.rejectUnauthorized = false
        break
      }
      case 'require': {
        if (config.sslrootcert) {
          // If a root CA is specified, behavior of `sslmode=require` will be the same as that of `verify-ca`
          config.ssl.checkServerIdentity = function () {}
        } else {
          config.ssl.rejectUnauthorized = false
        }
        break
      }
      case 'verify-ca': {
        if (!config.ssl.ca) {
          throw new Error(
            'SECURITY WARNING: Using sslmode=verify-ca requires specifying a CA with sslrootcert. If a public CA is used, verify-ca allows connections to a server that somebody else may have registered with the CA, making you vulnerable to Man-in-the-Middle attacks. Either specify a custom CA certificate with sslrootcert parameter or use sslmode=verify-full for proper security.'
          )
        }
        config.ssl.checkServerIdentity = function () {}
        break
      }
      case 'verify-full': {
        break
      }
    }
  } else {
    // historical (non-libpq) semantics for sslmode
    switch (config.sslmode) {
      case 'disable': {
        config.ssl = false
        break
      }
      case 'prefer':
      case 'require':
      case 'verify-ca':
      case 'verify-full': {
        break
      }
      case 'no-verify': {
        config.ssl.rejectUnauthorized = false
        break
      }
    }
  }

  return config
}
// Copy `sslConfig` into a fresh ClientConfig.ConnectionOptions object,
// dropping any keys whose value is null or undefined. The check is explicit
// (not truthiness) because some valid options are falsy, e.g.
// `ssl.rejectUnauthorized = false`.
function toConnectionOptions(sslConfig) {
  const cleaned = {}
  for (const [key, value] of Object.entries(sslConfig)) {
    if (value !== undefined && value !== null) {
      cleaned[key] = value
    }
  }
  return cleaned
}
// Convert a parsed pg-connection-string config into a `pg` ClientConfig:
// null/undefined values are dropped, `port` becomes a number (or is omitted
// when empty), and the `ssl` sub-object is cleaned via toConnectionOptions.
function toClientConfig(config) {
  const clientConfig = {}
  for (const [key, value] of Object.entries(config)) {
    if (key === 'ssl') {
      // `ssl` may be a boolean flag or an options object.
      if (typeof value === 'boolean') {
        clientConfig[key] = value
      } else if (typeof value === 'object') {
        clientConfig[key] = toConnectionOptions(value)
      }
    } else if (value !== undefined && value !== null) {
      if (key === 'port') {
        // when port is not specified, it is converted into an empty string;
        // we want to avoid NaN or empty string as values in ClientConfig
        if (value !== '') {
          const numericPort = parseInt(value, 10)
          if (isNaN(numericPort)) {
            throw new Error(`Invalid ${key}: ${value}`)
          }
          clientConfig[key] = numericPort
        }
      } else {
        clientConfig[key] = value
      }
    }
  }
  return clientConfig
}
// Parse a connection string straight into a `pg` ClientConfig.
function parseIntoClientConfig(str) {
  const parsed = parse(str)
  return toClientConfig(parsed)
}
// The module itself is the `parse` function (historical API of this package);
// the named helpers are attached as properties for destructuring consumers.
module.exports = parse

parse.parse = parse
parse.toClientConfig = toClientConfig
parse.parseIntoClientConfig = parseIntoClientConfig

52
node_modules/pg-connection-string/package.json generated vendored Normal file
View File

@ -0,0 +1,52 @@
{
"name": "pg-connection-string",
"version": "2.9.1",
"description": "Functions for dealing with a PostgresSQL connection string",
"main": "./index.js",
"types": "./index.d.ts",
"exports": {
".": {
"types": "./index.d.ts",
"import": "./esm/index.mjs",
"require": "./index.js",
"default": "./index.js"
}
},
"scripts": {
"test": "nyc --reporter=lcov mocha && npm run check-coverage",
"check-coverage": "nyc check-coverage --statements 100 --branches 100 --lines 100 --functions 100"
},
"repository": {
"type": "git",
"url": "git://github.com/brianc/node-postgres.git",
"directory": "packages/pg-connection-string"
},
"keywords": [
"pg",
"connection",
"string",
"parse"
],
"author": "Blaine Bublitz <blaine@iceddev.com> (http://iceddev.com/)",
"license": "MIT",
"bugs": {
"url": "https://github.com/brianc/node-postgres/issues"
},
"homepage": "https://github.com/brianc/node-postgres/tree/master/packages/pg-connection-string",
"devDependencies": {
"@types/pg": "^8.12.0",
"chai": "^4.1.1",
"coveralls": "^3.0.4",
"istanbul": "^0.4.5",
"mocha": "^10.5.2",
"nyc": "^15",
"tsx": "^4.19.4",
"typescript": "^4.0.3"
},
"files": [
"index.js",
"index.d.ts",
"esm"
],
"gitHead": "cd877a57612a39335a97b593111710d26126279d"
}

13
node_modules/pg-int8/LICENSE generated vendored Normal file
View File

@ -0,0 +1,13 @@
Copyright © 2017, Charmander <~@charmander.me>
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED “AS IS” AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.

16
node_modules/pg-int8/README.md generated vendored Normal file
View File

@ -0,0 +1,16 @@
[![Build status][ci image]][ci]
64-bit big-endian signed integer-to-string conversion designed for [pg][].
```js
const readInt8 = require('pg-int8');
readInt8(Buffer.from([0, 1, 2, 3, 4, 5, 6, 7]))
// '283686952306183'
```
[pg]: https://github.com/brianc/node-postgres
[ci]: https://travis-ci.org/charmander/pg-int8
[ci image]: https://api.travis-ci.org/charmander/pg-int8.svg

100
node_modules/pg-int8/index.js generated vendored Normal file
View File

@ -0,0 +1,100 @@
'use strict';
// selected so (BASE - 1) * 0x100000000 + 0xffffffff is a safe integer
var BASE = 1000000;

// Convert a 64-bit big-endian signed integer (an 8-byte Buffer) into its
// decimal string representation without BigInt — the value can exceed
// Number.MAX_SAFE_INTEGER, so it is handled as two 32-bit halves.
//
// The magnitude is repeatedly divided by BASE (10^6), peeling off six
// decimal digits per iteration; at most four iterations are needed because
// BASE^4 = 10^24 > 2^63. (This replaces four manually-unrolled copies of
// the identical divide step with a single loop.)
function readInt8(buffer) {
	var high = buffer.readInt32BE(0);
	var low = buffer.readUInt32BE(4);
	var sign = '';

	// Two's-complement negate so the digit loop operates on the magnitude.
	if (high < 0) {
		high = ~high + (low === 0);
		low = (~low + 1) >>> 0;
		sign = '-';
	}

	var result = '';

	for (;;) {
		// Divide (high * 2^32 + low) by BASE: the remainder is the next
		// six-digit group, the quotient stays in (high, low).
		var carry = high % BASE;
		high = high / BASE >>> 0;
		var t = 0x100000000 * carry + low;
		low = t / BASE >>> 0;
		var digits = '' + (t - BASE * low);

		if (low === 0 && high === 0) {
			// Most-significant group: emitted without zero padding.
			return sign + digits + result;
		}

		// Interior groups are zero-padded to exactly six digits.
		var pad = '';
		for (var i = digits.length; i < 6; i++) {
			pad += '0';
		}
		result = pad + digits + result;
	}
}
module.exports = readInt8;

24
node_modules/pg-int8/package.json generated vendored Normal file
View File

@ -0,0 +1,24 @@
{
"name": "pg-int8",
"version": "1.0.1",
"description": "64-bit big-endian signed integer-to-string conversion",
"bugs": "https://github.com/charmander/pg-int8/issues",
"license": "ISC",
"files": [
"index.js"
],
"repository": {
"type": "git",
"url": "https://github.com/charmander/pg-int8"
},
"scripts": {
"test": "tap test"
},
"devDependencies": {
"@charmander/eslint-config-base": "1.0.2",
"tap": "10.7.3"
},
"engines": {
"node": ">=4.0.0"
}
}

21
node_modules/pg-pool/LICENSE generated vendored Normal file
View File

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2017 Brian M. Carlson
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

376
node_modules/pg-pool/README.md generated vendored Normal file
View File

@ -0,0 +1,376 @@
# pg-pool
[![Build Status](https://travis-ci.org/brianc/node-pg-pool.svg?branch=master)](https://travis-ci.org/brianc/node-pg-pool)
A connection pool for node-postgres
## install
```sh
npm i pg-pool pg
```
## use
### create
to use pg-pool you must first create an instance of a pool
```js
const Pool = require('pg-pool')
// by default the pool uses the same
// configuration as whatever `pg` version you have installed
const pool = new Pool()
// you can pass properties to the pool
// these properties are passed unchanged to both the node-postgres Client constructor
// and the node-pool (https://github.com/coopernurse/node-pool) constructor
// allowing you to fully configure the behavior of both
const pool2 = new Pool({
database: 'postgres',
user: 'brianc',
password: 'secret!',
port: 5432,
ssl: true,
max: 20, // set pool max size to 20
idleTimeoutMillis: 1000, // close idle clients after 1 second
connectionTimeoutMillis: 1000, // return an error after 1 second if connection could not be established
maxUses: 7500, // close (and replace) a connection after it has been used 7500 times (see below for discussion)
})
// you can supply a custom client constructor
// if you want to use the native postgres client
const NativeClient = require('pg').native.Client
const nativePool = new Pool({ Client: NativeClient })
// you can even pool pg-native clients directly
const PgNativeClient = require('pg-native')
const pgNativePool = new Pool({ Client: PgNativeClient })
```
##### Note:
The Pool constructor does not support passing a Database URL as the parameter. To use pg-pool on heroku, for example, you need to parse the URL into a config object. Here is an example of how to parse a Database URL.
```js
const Pool = require('pg-pool');
const url = require('url')
const params = url.parse(process.env.DATABASE_URL);
const auth = params.auth.split(':');
const config = {
user: auth[0],
password: auth[1],
host: params.hostname,
port: params.port,
database: params.pathname.split('/')[1],
ssl: true
};
const pool = new Pool(config);
/*
Transforms, 'postgres://DBuser:secret@DBHost:#####/myDB', into
config = {
user: 'DBuser',
password: 'secret',
host: 'DBHost',
port: '#####',
database: 'myDB',
ssl: true
}
*/
```
### acquire clients with a promise
pg-pool supports a fully promise-based api for acquiring clients
```js
const pool = new Pool()
pool.connect().then(client => {
client.query('select $1::text as name', ['pg-pool']).then(res => {
client.release()
console.log('hello from', res.rows[0].name)
})
.catch(e => {
client.release()
console.error('query error', e.message, e.stack)
})
})
```
### plays nice with async/await
this ends up looking much nicer if you're using [co](https://github.com/tj/co) or async/await:
```js
// with async/await
(async () => {
const pool = new Pool()
const client = await pool.connect()
try {
const result = await client.query('select $1::text as name', ['brianc'])
console.log('hello from', result.rows[0])
} finally {
client.release()
}
})().catch(e => console.error(e.message, e.stack))
// with co
co(function * () {
const client = yield pool.connect()
try {
const result = yield client.query('select $1::text as name', ['brianc'])
console.log('hello from', result.rows[0])
} finally {
client.release()
}
}).catch(e => console.error(e.message, e.stack))
```
### your new favorite helper method
because its so common to just run a query and return the client to the pool afterward pg-pool has this built-in:
```js
const pool = new Pool()
const time = await pool.query('SELECT NOW()')
const name = await pool.query('select $1::text as name', ['brianc'])
console.log(name.rows[0].name, 'says hello at', time.rows[0].now)
```
you can also use a callback here if you'd like:
```js
const pool = new Pool()
pool.query('SELECT $1::text as name', ['brianc'], function (err, res) {
console.log(res.rows[0].name) // brianc
})
```
__pro tip:__ unless you need to run a transaction (which requires a single client for multiple queries) or you
have some other edge case like [streaming rows](https://github.com/brianc/node-pg-query-stream) or using a [cursor](https://github.com/brianc/node-pg-cursor)
you should almost always just use `pool.query`. It's easy, it does the right thing :tm:, and won't ever forget to return
clients back to the pool after the query is done.
### drop-in backwards compatible
pg-pool still and will always support the traditional callback api for acquiring a client. This is the exact API node-postgres has shipped with for years:
```js
const pool = new Pool()
pool.connect((err, client, done) => {
if (err) return done(err)
client.query('SELECT $1::text as name', ['pg-pool'], (err, res) => {
done()
if (err) {
return console.error('query error', err.message, err.stack)
}
console.log('hello from', res.rows[0].name)
})
})
```
### shut it down
When you are finished with the pool if all the clients are idle the pool will close them after `config.idleTimeoutMillis` and your app
will shutdown gracefully. If you don't want to wait for the timeout you can end the pool as follows:
```js
const pool = new Pool()
const client = await pool.connect()
console.log(await client.query('select now()'))
client.release()
await pool.end()
```
### a note on instances
The pool should be a __long-lived object__ in your application. Generally you'll want to instantiate one pool when your app starts up and use the same instance of the pool throughout the lifetime of your application. If you are frequently creating a new pool within your code you likely don't have your pool initialization code in the correct place. Example:
```js
// assume this is a file in your program at ./your-app/lib/db.js
// correct usage: create the pool and let it live
// 'globally' here, controlling access to it through exported methods
const pool = new pg.Pool()
// this is the right way to export the query method
module.exports.query = (text, values) => {
console.log('query:', text, values)
return pool.query(text, values)
}
// this would be the WRONG way to export the connect method
module.exports.connect = () => {
// notice how we would be creating a pool instance here
// every time we called 'connect' to get a new client?
// that's a bad thing & results in creating an unbounded
// number of pools & therefore connections
const aPool = new pg.Pool()
return aPool.connect()
}
```
### events
Every instance of a `Pool` is an event emitter. These instances emit the following events:
#### error
Emitted whenever an idle client in the pool encounters an error. This is common when your PostgreSQL server shuts down, reboots, or a network partition otherwise causes it to become unavailable while your pool has connected clients.
Example:
```js
const Pool = require('pg-pool')
const pool = new Pool()
// attach an error handler to the pool for when a connected, idle client
// receives an error by being disconnected, etc
pool.on('error', function(error, client) {
// handle this in the same way you would treat process.on('uncaughtException')
// it is supplied the error as well as the idle client which received the error
})
```
#### connect
Fired whenever the pool creates a __new__ `pg.Client` instance and successfully connects it to the backend.
Example:
```js
const Pool = require('pg-pool')
const pool = new Pool()
let count = 0
pool.on('connect', client => {
client.count = count++
})
pool
.connect()
.then(client => {
return client
.query('SELECT $1::int AS "clientCount"', [client.count])
.then(res => console.log(res.rows[0].clientCount)) // outputs 0
.then(() => client)
})
.then(client => client.release())
```
#### acquire
Fired whenever a client is acquired from the pool
Example:
This allows you to count the number of clients which have ever been acquired from the pool.
```js
const Pool = require('pg-pool')
const pool = new Pool()
let acquireCount = 0
pool.on('acquire', function (client) {
acquireCount++
})
let connectCount = 0
pool.on('connect', function () {
connectCount++
})
for (let i = 0; i < 200; i++) {
pool.query('SELECT NOW()')
}
setTimeout(function () {
console.log('connect count:', connectCount) // output: connect count: 10
console.log('acquire count:', acquireCount) // output: acquire count: 200
}, 100)
```
### environment variables
pg-pool & node-postgres support some of the same environment variables as `psql` supports. The most common are:
```
PGDATABASE=my_db
PGUSER=username
PGPASSWORD="my awesome password"
PGPORT=5432
PGSSLMODE=require
```
Usually I will export these into my local environment via a `.env` file with environment settings or export them in `~/.bash_profile` or something similar. This way I get configurability which works with both the postgres suite of tools (`psql`, `pg_dump`, `pg_restore`) and node, I can vary the environment variables locally and in production, and it supports the concept of a [12-factor app](http://12factor.net/) out of the box.
## bring your own promise
In versions of node `<=0.12.x` there is no native promise implementation available globally. You can polyfill the promise globally like this:
```js
// first run `npm install promise-polyfill --save
if (typeof Promise == 'undefined') {
global.Promise = require('promise-polyfill')
}
```
You can use any other promise implementation you'd like. The pool also allows you to configure the promise implementation on a per-pool level:
```js
const bluebirdPool = new Pool({
Promise: require('bluebird')
})
```
__please note:__ in node `<=0.12.x` the pool will throw if you do not provide a promise constructor in one of the two ways mentioned above. In node `>=4.0.0` the pool will use the native promise implementation by default; however, the two methods above still allow you to "bring your own."
## maxUses and read-replica autoscaling (e.g. AWS Aurora)
The maxUses config option can help an application instance rebalance load against a replica set that has been auto-scaled after the connection pool is already full of healthy connections.
The mechanism here is that a connection is considered "expended" after it has been acquired and released `maxUses` number of times. Depending on the load on your system, this means there will be an approximate time in which any given connection will live, thus creating a window for rebalancing.
Imagine a scenario where you have 10 app instances providing an API running against a replica cluster of 3 that are accessed via a round-robin DNS entry. Each instance runs a connection pool size of 20. With an ambient load of 50 requests per second, the connection pool will likely fill up in a few minutes with healthy connections.
If you have weekly bursts of traffic which peak at 1,000 requests per second, you might want to grow your replicas to 10 during this period. Without setting `maxUses`, the new replicas will not be adopted by the app servers without an intervention -- namely, restarting each in turn in order to build up new connection pools that are balanced against all the replicas. Adding additional app server instances will help to some extent because they will adopt all the replicas in an even way, but the initial app servers will continue to focus additional load on the original replicas.
This is where the `maxUses` configuration option comes into play. Setting `maxUses` to 7500 will ensure that over a period of 30 minutes or so the new replicas will be adopted as the pre-existing connections are closed and replaced with new ones, thus creating a window for eventual balance.
You'll want to test based on your own scenarios, but one way to make a first guess at `maxUses` is to identify an acceptable window for rebalancing and then solve for the value:
```
maxUses = rebalanceWindowSeconds * totalRequestsPerSecond / numAppInstances / poolSize
```
In the example above, assuming we acquire and release 1 connection per request and we are aiming for a 30 minute rebalancing window:
```
maxUses = rebalanceWindowSeconds * totalRequestsPerSecond / numAppInstances / poolSize
7200 = 1800 * 1000 / 10 / 25
```
## tests
To run tests clone the repo, `npm i` in the working dir, and then run `npm test`
## contributions
I love contributions. Please make sure they have tests, and submit a PR. If you're not sure if the issue is worth it or will be accepted it never hurts to open an issue to begin the conversation. If you're interested in keeping up with node-postgres related stuff, you can follow me on twitter at [@briancarlson](https://twitter.com/briancarlson) - I generally announce any noteworthy updates there.
## license
The MIT License (MIT)
Copyright (c) 2016 Brian M. Carlson
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

5
node_modules/pg-pool/esm/index.mjs generated vendored Normal file
View File

@ -0,0 +1,5 @@
// ESM wrapper for pg-pool
// Bridges the CommonJS implementation in ../index.js to ES module consumers.
import Pool from '../index.js'

// Export as default only to match CJS module
export default Pool

479
node_modules/pg-pool/index.js generated vendored Normal file
View File

@ -0,0 +1,479 @@
'use strict'
const EventEmitter = require('events').EventEmitter
const NOOP = function () {}
// Remove and return the first element of `list` matching `predicate`,
// mutating `list` in place. Returns undefined when nothing matches.
const removeWhere = (list, predicate) => {
  const index = list.findIndex(predicate)
  if (index === -1) {
    return undefined
  }
  return list.splice(index, 1)[0]
}
// Bookkeeping record for a client sitting in the idle list, together with
// the error listener attached while idle and the idle-timeout timer handle.
class IdleItem {
  constructor(pooledClient, errorListener, idleTimeoutId) {
    this.client = pooledClient
    this.idleListener = errorListener
    this.timeoutId = idleTimeoutId
  }
}
// Bookkeeping record for a caller waiting on `connect()`; wraps the
// callback that will eventually receive (err, client, release).
class PendingItem {
  constructor(cb) {
    this.callback = cb
  }
}
// Guard installed on a released client's `release` slot: calling release a
// second time on the same checkout is a programming error, so fail loudly.
function throwOnDoubleRelease() {
  throw new Error('Release called on client which has already been released to the pool.')
}
// Normalize the optional-callback API into a { callback, result } pair.
// When the user supplied a callback, it is used directly and no promise is
// created (result is undefined). Otherwise a promise is built whose
// settlement is driven by the returned node-style callback; the rejection
// path recaptures the stack so errors point back at the application code
// that created the request rather than at internal socket reads.
function promisify(Promise, callback) {
  if (callback) {
    return { callback: callback, result: undefined }
  }
  let resolveFn
  let rejectFn
  const nodeCallback = function (err, client) {
    if (err) {
      rejectFn(err)
    } else {
      resolveFn(client)
    }
  }
  const result = new Promise(function (resolve, reject) {
    resolveFn = resolve
    rejectFn = reject
  }).catch((err) => {
    // replace the stack trace that leads to `TCP.onStreamRead` with one that
    // leads back to the application that created the query
    Error.captureStackTrace(err)
    throw err
  })
  return { callback: nodeCallback, result: result }
}
// Build the 'error' listener attached to a client while it sits idle in the
// pool. On error it tags the error with the client, swaps the listener for a
// log-only handler (further errors after disconnect are expected noise),
// purges the client from the pool, and re-emits as a pool-level 'error'.
function makeIdleListener(pool, client) {
  return function idleListener(err) {
    err.client = client

    // After the first error the client is being torn down; any subsequent
    // errors are only logged instead of re-emitted.
    const logOnly = () => {
      pool.log('additional client error after disconnection due to error', err)
    }
    client.removeListener('error', idleListener)
    client.on('error', logOnly)

    pool._remove(client)
    // TODO - document that once the pool emits an error
    // the client has already been closed & purged and is unusable
    pool.emit('error', err, client)
  }
}
class Pool extends EventEmitter {
constructor(options, Client) {
super()
this.options = Object.assign({}, options)
if (options != null && 'password' in options) {
// "hiding" the password so it doesn't show up in stack traces
// or if the client is console.logged
Object.defineProperty(this.options, 'password', {
configurable: true,
enumerable: false,
writable: true,
value: options.password,
})
}
if (options != null && options.ssl && options.ssl.key) {
// "hiding" the ssl->key so it doesn't show up in stack traces
// or if the client is console.logged
Object.defineProperty(this.options.ssl, 'key', {
enumerable: false,
})
}
this.options.max = this.options.max || this.options.poolSize || 10
this.options.min = this.options.min || 0
this.options.maxUses = this.options.maxUses || Infinity
this.options.allowExitOnIdle = this.options.allowExitOnIdle || false
this.options.maxLifetimeSeconds = this.options.maxLifetimeSeconds || 0
this.log = this.options.log || function () {}
this.Client = this.options.Client || Client || require('pg').Client
this.Promise = this.options.Promise || global.Promise
if (typeof this.options.idleTimeoutMillis === 'undefined') {
this.options.idleTimeoutMillis = 10000
}
this._clients = []
this._idle = []
this._expired = new WeakSet()
this._pendingQueue = []
this._endCallback = undefined
this.ending = false
this.ended = false
}
_isFull() {
return this._clients.length >= this.options.max
}
_isAboveMin() {
return this._clients.length > this.options.min
}
_pulseQueue() {
this.log('pulse queue')
if (this.ended) {
this.log('pulse queue ended')
return
}
if (this.ending) {
this.log('pulse queue on ending')
if (this._idle.length) {
this._idle.slice().map((item) => {
this._remove(item.client)
})
}
if (!this._clients.length) {
this.ended = true
this._endCallback()
}
return
}
// if we don't have any waiting, do nothing
if (!this._pendingQueue.length) {
this.log('no queued requests')
return
}
// if we don't have any idle clients and we have no more room do nothing
if (!this._idle.length && this._isFull()) {
return
}
const pendingItem = this._pendingQueue.shift()
if (this._idle.length) {
const idleItem = this._idle.pop()
clearTimeout(idleItem.timeoutId)
const client = idleItem.client
client.ref && client.ref()
const idleListener = idleItem.idleListener
return this._acquireClient(client, pendingItem, idleListener, false)
}
if (!this._isFull()) {
return this.newClient(pendingItem)
}
throw new Error('unexpected condition')
}
_remove(client, callback) {
const removed = removeWhere(this._idle, (item) => item.client === client)
if (removed !== undefined) {
clearTimeout(removed.timeoutId)
}
this._clients = this._clients.filter((c) => c !== client)
const context = this
client.end(() => {
context.emit('remove', client)
if (typeof callback === 'function') {
callback()
}
})
}
  // Check out a client from the pool. Accepts an optional node-style
  // callback; otherwise returns a Promise resolving to a connected client.
  connect(cb) {
    if (this.ending) {
      const err = new Error('Cannot use a pool after calling end on the pool')
      return cb ? cb(err) : this.Promise.reject(err)
    }
    const response = promisify(this.Promise, cb)
    const result = response.result
    // if we don't have to connect a new client, don't do so
    if (this._isFull() || this._idle.length) {
      // if we have idle clients schedule a pulse immediately
      if (this._idle.length) {
        process.nextTick(() => this._pulseQueue())
      }
      if (!this.options.connectionTimeoutMillis) {
        this._pendingQueue.push(new PendingItem(response.callback))
        return result
      }
      // wrap the caller's callback so a successful checkout cancels the timer
      const queueCallback = (err, res, done) => {
        clearTimeout(tid)
        response.callback(err, res, done)
      }
      const pendingItem = new PendingItem(queueCallback)
      // set connection timeout on checking out an existing client
      const tid = setTimeout(() => {
        // remove the callback from pending waiters because
        // we're going to call it with a timeout error
        removeWhere(this._pendingQueue, (i) => i.callback === queueCallback)
        pendingItem.timedOut = true
        response.callback(new Error('timeout exceeded when trying to connect'))
      }, this.options.connectionTimeoutMillis)
      // the pending timer alone should not keep the process alive
      if (tid.unref) {
        tid.unref()
      }
      this._pendingQueue.push(pendingItem)
      return result
    }
    // pool has room and no idle clients: spin up a brand-new connection
    this.newClient(new PendingItem(response.callback))
    return result
  }
  // Create, register, and connect a brand-new client, then hand it to
  // `pendingItem`. Applies connectionTimeoutMillis to the connection attempt
  // and, if configured, schedules a maxLifetimeSeconds expiry for the client.
  newClient(pendingItem) {
    const client = new this.Client(this.options)
    this._clients.push(client)
    const idleListener = makeIdleListener(this, client)
    this.log('checking client timeout')
    // connection timeout logic
    let tid
    let timeoutHit = false
    if (this.options.connectionTimeoutMillis) {
      tid = setTimeout(() => {
        this.log('ending client due to timeout')
        timeoutHit = true
        // force kill the node driver, and let libpq do its teardown
        client.connection ? client.connection.stream.destroy() : client.end()
      }, this.options.connectionTimeoutMillis)
    }
    this.log('connecting new client')
    client.connect((err) => {
      if (tid) {
        clearTimeout(tid)
      }
      // route background errors to the idle listener while the client is pooled
      client.on('error', idleListener)
      if (err) {
        this.log('client failed to connect', err)
        // remove the dead client from our list of clients
        this._clients = this._clients.filter((c) => c !== client)
        if (timeoutHit) {
          err = new Error('Connection terminated due to connection timeout', { cause: err })
        }
        // this client wont be released, so move on immediately
        this._pulseQueue()
        if (!pendingItem.timedOut) {
          pendingItem.callback(err, undefined, NOOP)
        }
      } else {
        this.log('new client connected')
        if (this.options.maxLifetimeSeconds !== 0) {
          const maxLifetimeTimeout = setTimeout(() => {
            this.log('ending client due to expired lifetime')
            this._expired.add(client)
            const idleIndex = this._idle.findIndex((idleItem) => idleItem.client === client)
            // if the expired client is currently idle, check it out once just
            // to release it, which routes it through the removal path
            if (idleIndex !== -1) {
              this._acquireClient(
                client,
                new PendingItem((err, client, clientRelease) => clientRelease()),
                idleListener,
                false
              )
            }
          }, this.options.maxLifetimeSeconds * 1000)
          // the lifetime timer should not keep the process alive on its own
          maxLifetimeTimeout.unref()
          client.once('end', () => clearTimeout(maxLifetimeTimeout))
        }
        return this._acquireClient(client, pendingItem, idleListener, true)
      }
    })
  }
// acquire a client for a pending work item
_acquireClient(client, pendingItem, idleListener, isNew) {
if (isNew) {
this.emit('connect', client)
}
this.emit('acquire', client)
client.release = this._releaseOnce(client, idleListener)
client.removeListener('error', idleListener)
if (!pendingItem.timedOut) {
if (isNew && this.options.verify) {
this.options.verify(client, (err) => {
if (err) {
client.release(err)
return pendingItem.callback(err, undefined, NOOP)
}
pendingItem.callback(undefined, client, client.release)
})
} else {
pendingItem.callback(undefined, client, client.release)
}
} else {
if (isNew && this.options.verify) {
this.options.verify(client, client.release)
} else {
client.release()
}
}
}
// returns a function that wraps _release and throws if called more than once
_releaseOnce(client, idleListener) {
let released = false
return (err) => {
if (released) {
throwOnDoubleRelease()
}
released = true
this._release(client, idleListener, err)
}
}
  // release a client back to the pool, include an error
  // to remove it from the pool
  _release(client, idleListener, err) {
    client.on('error', idleListener)
    client._poolUseCount = (client._poolUseCount || 0) + 1
    this.emit('release', err, client)
    // TODO(bmc): expose a proper, public interface _queryable and _ending
    // discard the client instead of pooling it when it errored, the pool is
    // shutting down, the connection is unusable, or it has hit maxUses
    if (err || this.ending || !client._queryable || client._ending || client._poolUseCount >= this.options.maxUses) {
      if (client._poolUseCount >= this.options.maxUses) {
        this.log('remove expended client')
      }
      return this._remove(client, this._pulseQueue.bind(this))
    }
    // clients flagged by the maxLifetime timer are removed on release
    const isExpired = this._expired.has(client)
    if (isExpired) {
      this.log('remove expired client')
      this._expired.delete(client)
      return this._remove(client, this._pulseQueue.bind(this))
    }
    // idle timeout
    let tid
    if (this.options.idleTimeoutMillis && this._isAboveMin()) {
      tid = setTimeout(() => {
        this.log('remove idle client')
        this._remove(client, this._pulseQueue.bind(this))
      }, this.options.idleTimeoutMillis)
      if (this.options.allowExitOnIdle) {
        // allow Node to exit if this is all that's left
        tid.unref()
      }
    }
    if (this.options.allowExitOnIdle) {
      client.unref()
    }
    this._idle.push(new IdleItem(client, idleListener, tid))
    this._pulseQueue()
  }
query(text, values, cb) {
// guard clause against passing a function as the first parameter
if (typeof text === 'function') {
const response = promisify(this.Promise, text)
setImmediate(function () {
return response.callback(new Error('Passing a function as the first parameter to pool.query is not supported'))
})
return response.result
}
// allow plain text query without values
if (typeof values === 'function') {
cb = values
values = undefined
}
const response = promisify(this.Promise, cb)
cb = response.callback
this.connect((err, client) => {
if (err) {
return cb(err)
}
let clientReleased = false
const onError = (err) => {
if (clientReleased) {
return
}
clientReleased = true
client.release(err)
cb(err)
}
client.once('error', onError)
this.log('dispatching query')
try {
client.query(text, values, (err, res) => {
this.log('query dispatched')
client.removeListener('error', onError)
if (clientReleased) {
return
}
clientReleased = true
client.release(err)
if (err) {
return cb(err)
}
return cb(undefined, res)
})
} catch (err) {
client.release(err)
return cb(err)
}
})
return response.result
}
end(cb) {
this.log('ending')
if (this.ending) {
const err = new Error('Called end on pool more than once')
return cb ? cb(err) : this.Promise.reject(err)
}
this.ending = true
const promised = promisify(this.Promise, cb)
this._endCallback = promised.callback
this._pulseQueue()
return promised.result
}
get waitingCount() {
return this._pendingQueue.length
}
get idleCount() {
return this._idle.length
}
get expiredCount() {
return this._clients.reduce((acc, client) => acc + (this._expired.has(client) ? 1 : 0), 0)
}
get totalCount() {
return this._clients.length
}
}
module.exports = Pool

51
node_modules/pg-pool/package.json generated vendored Normal file
View File

@ -0,0 +1,51 @@
{
"name": "pg-pool",
"version": "3.10.1",
"description": "Connection pool for node-postgres",
"main": "index.js",
"exports": {
".": {
"import": "./esm/index.mjs",
"require": "./index.js",
"default": "./index.js"
}
},
"directories": {
"test": "test"
},
"scripts": {
    "test": "node_modules/.bin/mocha"
},
"repository": {
"type": "git",
"url": "git://github.com/brianc/node-postgres.git",
"directory": "packages/pg-pool"
},
"keywords": [
"pg",
"postgres",
"pool",
"database"
],
"author": "Brian M. Carlson",
"license": "MIT",
"bugs": {
"url": "https://github.com/brianc/node-pg-pool/issues"
},
"homepage": "https://github.com/brianc/node-pg-pool#readme",
"devDependencies": {
"bluebird": "3.7.2",
"co": "4.6.0",
"expect.js": "0.3.1",
"lodash": "^4.17.11",
"mocha": "^10.5.2"
},
"peerDependencies": {
"pg": ">=8.0"
},
"files": [
"index.js",
"esm"
],
"gitHead": "cd877a57612a39335a97b593111710d26126279d"
}

21
node_modules/pg-protocol/LICENSE generated vendored Normal file
View File

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2010 - 2021 Brian Carlson
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

3
node_modules/pg-protocol/README.md generated vendored Normal file
View File

@ -0,0 +1,3 @@
# pg-protocol
Low level postgres wire protocol parser and serializer written in Typescript. Used by node-postgres. Needs more documentation. :smile:

1
node_modules/pg-protocol/dist/b.d.ts generated vendored Normal file
View File

@ -0,0 +1 @@
export {};

23
node_modules/pg-protocol/dist/b.js generated vendored Normal file
View File

@ -0,0 +1,23 @@
"use strict";
// file for microbenchmarking
Object.defineProperty(exports, "__esModule", { value: true });
const buffer_reader_1 = require("./buffer-reader");
// number of outer ticks and of inner iterations per tick
const LOOPS = 1000;
let count = 0;
const start = performance.now();
const reader = new buffer_reader_1.BufferReader();
// eight bytes: seven '!' characters followed by a null terminator
const buffer = Buffer.from([33, 33, 33, 33, 33, 33, 33, 0]);
const run = () => {
    if (count > LOOPS) {
        // done: report total elapsed milliseconds
        console.log(performance.now() - start);
        return;
    }
    count++;
    // time LOOPS cstring() parses over the same buffer
    for (let i = 0; i < LOOPS; i++) {
        reader.setBuffer(0, buffer);
        reader.cstring();
    }
    // yield to the event loop between batches
    setImmediate(run);
};
run();
//# sourceMappingURL=b.js.map

1
node_modules/pg-protocol/dist/b.js.map generated vendored Normal file
View File

@ -0,0 +1 @@
{"version":3,"file":"b.js","sourceRoot":"","sources":["../src/b.ts"],"names":[],"mappings":";AAAA,6BAA6B;;AAE7B,mDAA8C;AAE9C,MAAM,KAAK,GAAG,IAAI,CAAA;AAClB,IAAI,KAAK,GAAG,CAAC,CAAA;AACb,MAAM,KAAK,GAAG,WAAW,CAAC,GAAG,EAAE,CAAA;AAE/B,MAAM,MAAM,GAAG,IAAI,4BAAY,EAAE,CAAA;AACjC,MAAM,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,CAAA;AAE3D,MAAM,GAAG,GAAG,GAAG,EAAE;IACf,IAAI,KAAK,GAAG,KAAK,EAAE;QACjB,OAAO,CAAC,GAAG,CAAC,WAAW,CAAC,GAAG,EAAE,GAAG,KAAK,CAAC,CAAA;QACtC,OAAM;KACP;IACD,KAAK,EAAE,CAAA;IACP,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,EAAE,CAAC,EAAE,EAAE;QAC9B,MAAM,CAAC,SAAS,CAAC,CAAC,EAAE,MAAM,CAAC,CAAA;QAC3B,MAAM,CAAC,OAAO,EAAE,CAAA;KACjB;IACD,YAAY,CAAC,GAAG,CAAC,CAAA;AACnB,CAAC,CAAA;AAED,GAAG,EAAE,CAAA"}

15
node_modules/pg-protocol/dist/buffer-reader.d.ts generated vendored Normal file
View File

@ -0,0 +1,15 @@
/// <reference types="node" />
export declare class BufferReader {
private offset;
private buffer;
private encoding;
constructor(offset?: number);
setBuffer(offset: number, buffer: Buffer): void;
int16(): number;
byte(): number;
int32(): number;
uint32(): number;
string(length: number): string;
cstring(): string;
bytes(length: number): Buffer;
}

56
node_modules/pg-protocol/dist/buffer-reader.js generated vendored Normal file
View File

@ -0,0 +1,56 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.BufferReader = void 0;
const emptyBuffer = Buffer.allocUnsafe(0);
/**
 * Cursor-style reader over a Buffer of postgres wire-protocol data.
 * Each accessor consumes bytes and advances an internal offset.
 */
class BufferReader {
    constructor(offset = 0) {
        this.offset = offset;
        this.buffer = emptyBuffer;
        // TODO(bmc): support non-utf8 encoding?
        this.encoding = 'utf-8';
    }
    // Point the reader at a (possibly reused) buffer and starting offset.
    setBuffer(offset, buffer) {
        this.offset = offset;
        this.buffer = buffer;
    }
    int16() {
        const value = this.buffer.readInt16BE(this.offset);
        this.offset += 2;
        return value;
    }
    byte() {
        return this.buffer[this.offset++];
    }
    int32() {
        const value = this.buffer.readInt32BE(this.offset);
        this.offset += 4;
        return value;
    }
    uint32() {
        const value = this.buffer.readUInt32BE(this.offset);
        this.offset += 4;
        return value;
    }
    string(length) {
        const end = this.offset + length;
        const value = this.buffer.toString(this.encoding, this.offset, end);
        this.offset = end;
        return value;
    }
    // Read a null-terminated string; the terminator is consumed but excluded.
    cstring() {
        const start = this.offset;
        let cursor = start;
        // eslint-disable-next-line no-empty
        while (this.buffer[cursor++] !== 0) { }
        this.offset = cursor;
        return this.buffer.toString(this.encoding, start, cursor - 1);
    }
    bytes(length) {
        const end = this.offset + length;
        const slice = this.buffer.slice(this.offset, end);
        this.offset = end;
        return slice;
    }
}
exports.BufferReader = BufferReader;
//# sourceMappingURL=buffer-reader.js.map

1
node_modules/pg-protocol/dist/buffer-reader.js.map generated vendored Normal file
View File

@ -0,0 +1 @@
{"version":3,"file":"buffer-reader.js","sourceRoot":"","sources":["../src/buffer-reader.ts"],"names":[],"mappings":";;;AAAA,MAAM,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAA;AAEzC,MAAa,YAAY;IAMvB,YAAoB,SAAiB,CAAC;QAAlB,WAAM,GAAN,MAAM,CAAY;QAL9B,WAAM,GAAW,WAAW,CAAA;QAEpC,wCAAwC;QAChC,aAAQ,GAAW,OAAO,CAAA;IAEO,CAAC;IAEnC,SAAS,CAAC,MAAc,EAAE,MAAc;QAC7C,IAAI,CAAC,MAAM,GAAG,MAAM,CAAA;QACpB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAA;IACtB,CAAC;IAEM,KAAK;QACV,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;QACnD,IAAI,CAAC,MAAM,IAAI,CAAC,CAAA;QAChB,OAAO,MAAM,CAAA;IACf,CAAC;IAEM,IAAI;QACT,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;QACvC,IAAI,CAAC,MAAM,EAAE,CAAA;QACb,OAAO,MAAM,CAAA;IACf,CAAC;IAEM,KAAK;QACV,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;QACnD,IAAI,CAAC,MAAM,IAAI,CAAC,CAAA;QAChB,OAAO,MAAM,CAAA;IACf,CAAC;IAEM,MAAM;QACX,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;QACpD,IAAI,CAAC,MAAM,IAAI,CAAC,CAAA;QAChB,OAAO,MAAM,CAAA;IACf,CAAC;IAEM,MAAM,CAAC,MAAc;QAC1B,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,CAAA;QACrF,IAAI,CAAC,MAAM,IAAI,MAAM,CAAA;QACrB,OAAO,MAAM,CAAA;IACf,CAAC;IAEM,OAAO;QACZ,MAAM,KAAK,GAAG,IAAI,CAAC,MAAM,CAAA;QACzB,IAAI,GAAG,GAAG,KAAK,CAAA;QACf,oCAAoC;QACpC,OAAO,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,KAAK,CAAC,EAAE,GAAE;QACnC,IAAI,CAAC,MAAM,GAAG,GAAG,CAAA;QACjB,OAAO,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,QAAQ,EAAE,KAAK,EAAE,GAAG,GAAG,CAAC,CAAC,CAAA;IAC5D,CAAC;IAEM,KAAK,CAAC,MAAc;QACzB,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,CAAA;QACnE,IAAI,CAAC,MAAM,IAAI,MAAM,CAAA;QACrB,OAAO,MAAM,CAAA;IACf,CAAC;CACF;AAzDD,oCAyDC"}

16
node_modules/pg-protocol/dist/buffer-writer.d.ts generated vendored Normal file
View File

@ -0,0 +1,16 @@
/// <reference types="node" />
export declare class Writer {
private size;
private buffer;
private offset;
private headerPosition;
constructor(size?: number);
private ensure;
addInt32(num: number): Writer;
addInt16(num: number): Writer;
addCString(string: string): Writer;
addString(string?: string): Writer;
add(otherBuffer: Buffer): Writer;
private join;
flush(code?: number): Buffer;
}

81
node_modules/pg-protocol/dist/buffer-writer.js generated vendored Normal file
View File

@ -0,0 +1,81 @@
"use strict";
//binary data writer tuned for encoding binary specific to the postgres binary protocol
Object.defineProperty(exports, "__esModule", { value: true });
exports.Writer = void 0;
/**
 * Incremental binary writer tuned for the postgres wire protocol.
 * Bytes 0..4 of the backing buffer are reserved for a one-byte message
 * code plus an Int32BE length header, which join()/flush() fill in when
 * a code is supplied.
 */
class Writer {
    constructor(size = 256) {
        this.size = size;
        this.offset = 5;
        this.headerPosition = 0;
        this.buffer = Buffer.allocUnsafe(size);
    }
    // Grow the backing buffer (~1.5x plus the requested amount) when fewer
    // than `size` bytes remain past the current offset.
    ensure(size) {
        const remaining = this.buffer.length - this.offset;
        if (remaining < size) {
            const oldBuffer = this.buffer;
            // exponential growth factor of around ~ 1.5
            // https://stackoverflow.com/questions/2269063/buffer-growth-strategy
            const newSize = oldBuffer.length + (oldBuffer.length >> 1) + size;
            this.buffer = Buffer.allocUnsafe(newSize);
            oldBuffer.copy(this.buffer);
        }
    }
    addInt32(num) {
        this.ensure(4);
        // big-endian, truncating like the original shift-based writes
        for (const shift of [24, 16, 8, 0]) {
            this.buffer[this.offset++] = (num >>> shift) & 0xff;
        }
        return this;
    }
    addInt16(num) {
        this.ensure(2);
        this.buffer[this.offset++] = (num >>> 8) & 0xff;
        this.buffer[this.offset++] = num & 0xff;
        return this;
    }
    // Append a UTF-8 string followed by a null terminator.
    addCString(string) {
        if (string) {
            const len = Buffer.byteLength(string);
            this.ensure(len + 1); // +1 for null terminator
            this.buffer.write(string, this.offset, 'utf-8');
            this.offset += len;
        }
        else {
            this.ensure(1);
        }
        this.buffer[this.offset++] = 0; // null terminator
        return this;
    }
    addString(string = '') {
        const len = Buffer.byteLength(string);
        this.ensure(len);
        this.buffer.write(string, this.offset);
        this.offset += len;
        return this;
    }
    add(otherBuffer) {
        this.ensure(otherBuffer.length);
        otherBuffer.copy(this.buffer, this.offset);
        this.offset += otherBuffer.length;
        return this;
    }
    // Produce the wire bytes. With a code, stamp the code byte and the
    // Int32BE length (packet minus the code byte) into the header and
    // return the whole packet; without one, return only the payload.
    join(code) {
        if (code) {
            this.buffer[this.headerPosition] = code;
            //length is everything in this packet minus the code
            const length = this.offset - (this.headerPosition + 1);
            this.buffer.writeInt32BE(length, this.headerPosition + 1);
        }
        return this.buffer.slice(code ? 0 : 5, this.offset);
    }
    // join() and then reset the writer onto a fresh backing buffer.
    flush(code) {
        const result = this.join(code);
        this.offset = 5;
        this.headerPosition = 0;
        this.buffer = Buffer.allocUnsafe(this.size);
        return result;
    }
}
exports.Writer = Writer;
//# sourceMappingURL=buffer-writer.js.map

1
node_modules/pg-protocol/dist/buffer-writer.js.map generated vendored Normal file
View File

@ -0,0 +1 @@
{"version":3,"file":"buffer-writer.js","sourceRoot":"","sources":["../src/buffer-writer.ts"],"names":[],"mappings":";AAAA,uFAAuF;;;AAEvF,MAAa,MAAM;IAIjB,YAAoB,OAAO,GAAG;QAAV,SAAI,GAAJ,IAAI,CAAM;QAFtB,WAAM,GAAW,CAAC,CAAA;QAClB,mBAAc,GAAW,CAAC,CAAA;QAEhC,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,CAAA;IACxC,CAAC;IAEO,MAAM,CAAC,IAAY;QACzB,MAAM,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QAClD,IAAI,SAAS,GAAG,IAAI,EAAE;YACpB,MAAM,SAAS,GAAG,IAAI,CAAC,MAAM,CAAA;YAC7B,4CAA4C;YAC5C,qEAAqE;YACrE,MAAM,OAAO,GAAG,SAAS,CAAC,MAAM,GAAG,CAAC,SAAS,CAAC,MAAM,IAAI,CAAC,CAAC,GAAG,IAAI,CAAA;YACjE,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,WAAW,CAAC,OAAO,CAAC,CAAA;YACzC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;SAC5B;IACH,CAAC;IAEM,QAAQ,CAAC,GAAW;QACzB,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;QACd,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,EAAE,CAAC,GAAG,IAAI,CAAA;QAChD,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,EAAE,CAAC,GAAG,IAAI,CAAA;QAChD,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,IAAI,CAAA;QAC/C,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,IAAI,CAAA;QAC/C,OAAO,IAAI,CAAA;IACb,CAAC;IAEM,QAAQ,CAAC,GAAW;QACzB,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;QACd,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,IAAI,CAAA;QAC/C,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,IAAI,CAAA;QAC/C,OAAO,IAAI,CAAA;IACb,CAAC;IAEM,UAAU,CAAC,MAAc;QAC9B,IAAI,CAAC,MAAM,EAAE;YACX,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;SACf;aAAM;YACL,MAAM,GAAG,GAAG,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAA;YACrC,IAAI,CAAC,MAAM,CAAC,GAAG,GAAG,CAAC,CAAC,CAAA,CAAC,yBAAyB;YAC9C,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;YAC/C,IAAI,CAAC,MAAM,IAAI,GAAG,CAAA;SACnB;QAED,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,CAAA,CAAC,kBAAkB;QACjD,OAAO,IAAI,CAAA;IACb,CAAC;IAEM,SAAS,CAAC,SAAiB,EAAE;QAClC,MAAM,GAAG,GAAG,MAAM,CAAC,UAAU,CAAC,MAA
M,CAAC,CAAA;QACrC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAA;QAChB,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,CAAA;QACtC,IAAI,CAAC,MAAM,IAAI,GAAG,CAAA;QAClB,OAAO,IAAI,CAAA;IACb,CAAC;IAEM,GAAG,CAAC,WAAmB;QAC5B,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,MAAM,CAAC,CAAA;QAC/B,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,CAAA;QAC1C,IAAI,CAAC,MAAM,IAAI,WAAW,CAAC,MAAM,CAAA;QACjC,OAAO,IAAI,CAAA;IACb,CAAC;IAEO,IAAI,CAAC,IAAa;QACxB,IAAI,IAAI,EAAE;YACR,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,cAAc,CAAC,GAAG,IAAI,CAAA;YACvC,oDAAoD;YACpD,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,cAAc,GAAG,CAAC,CAAC,CAAA;YACtD,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,MAAM,EAAE,IAAI,CAAC,cAAc,GAAG,CAAC,CAAC,CAAA;SAC1D;QACD,OAAO,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,CAAA;IACrD,CAAC;IAEM,KAAK,CAAC,IAAa;QACxB,MAAM,MAAM,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;QAC9B,IAAI,CAAC,MAAM,GAAG,CAAC,CAAA;QACf,IAAI,CAAC,cAAc,GAAG,CAAC,CAAA;QACvB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;QAC3C,OAAO,MAAM,CAAA;IACf,CAAC;CACF;AAlFD,wBAkFC"}

View File

@ -0,0 +1 @@
export {};

524
node_modules/pg-protocol/dist/inbound-parser.test.js generated vendored Normal file
View File

@ -0,0 +1,524 @@
"use strict";
// TypeScript-emitted helper: drives a generator to completion, adapting each
// yielded value into a Promise chain (the downlevel form of async/await).
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// TypeScript-emitted helper: wraps a CommonJS export so `.default` resolves
// correctly whether or not the required module was compiled as an ES module.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const test_buffers_1 = __importDefault(require("./testing/test-buffers"));
const buffer_list_1 = __importDefault(require("./testing/buffer-list"));
const _1 = require(".");
const assert_1 = __importDefault(require("assert"));
const stream_1 = require("stream");
const authOkBuffer = test_buffers_1.default.authenticationOk();
const paramStatusBuffer = test_buffers_1.default.parameterStatus('client_encoding', 'UTF8');
const readyForQueryBuffer = test_buffers_1.default.readyForQuery();
const backendKeyDataBuffer = test_buffers_1.default.backendKeyData(1, 2);
const commandCompleteBuffer = test_buffers_1.default.commandComplete('SELECT 3');
const parseCompleteBuffer = test_buffers_1.default.parseComplete();
const bindCompleteBuffer = test_buffers_1.default.bindComplete();
const portalSuspendedBuffer = test_buffers_1.default.portalSuspended();
const row1 = {
name: 'id',
tableID: 1,
attributeNumber: 2,
dataTypeID: 3,
dataTypeSize: 4,
typeModifier: 5,
formatCode: 0,
};
const oneRowDescBuff = test_buffers_1.default.rowDescription([row1]);
row1.name = 'bang';
const twoRowBuf = test_buffers_1.default.rowDescription([
row1,
{
name: 'whoah',
tableID: 10,
attributeNumber: 11,
dataTypeID: 12,
dataTypeSize: 13,
typeModifier: 14,
formatCode: 0,
},
]);
const rowWithBigOids = {
name: 'bigoid',
tableID: 3000000001,
attributeNumber: 2,
dataTypeID: 3000000003,
dataTypeSize: 4,
typeModifier: 5,
formatCode: 0,
};
const bigOidDescBuff = test_buffers_1.default.rowDescription([rowWithBigOids]);
const emptyRowFieldBuf = test_buffers_1.default.dataRow([]);
const oneFieldBuf = test_buffers_1.default.dataRow(['test']);
const expectedAuthenticationOkayMessage = {
name: 'authenticationOk',
length: 8,
};
const expectedParameterStatusMessage = {
name: 'parameterStatus',
parameterName: 'client_encoding',
parameterValue: 'UTF8',
length: 25,
};
const expectedBackendKeyDataMessage = {
name: 'backendKeyData',
processID: 1,
secretKey: 2,
};
const expectedReadyForQueryMessage = {
name: 'readyForQuery',
length: 5,
status: 'I',
};
const expectedCommandCompleteMessage = {
name: 'commandComplete',
length: 13,
text: 'SELECT 3',
};
const emptyRowDescriptionBuffer = new buffer_list_1.default()
.addInt16(0) // number of fields
.join(true, 'T');
const expectedEmptyRowDescriptionMessage = {
name: 'rowDescription',
length: 6,
fieldCount: 0,
fields: [],
};
const expectedOneRowMessage = {
name: 'rowDescription',
length: 27,
fieldCount: 1,
fields: [
{
name: 'id',
tableID: 1,
columnID: 2,
dataTypeID: 3,
dataTypeSize: 4,
dataTypeModifier: 5,
format: 'text',
},
],
};
const expectedTwoRowMessage = {
name: 'rowDescription',
length: 53,
fieldCount: 2,
fields: [
{
name: 'bang',
tableID: 1,
columnID: 2,
dataTypeID: 3,
dataTypeSize: 4,
dataTypeModifier: 5,
format: 'text',
},
{
name: 'whoah',
tableID: 10,
columnID: 11,
dataTypeID: 12,
dataTypeSize: 13,
dataTypeModifier: 14,
format: 'text',
},
],
};
const expectedBigOidMessage = {
name: 'rowDescription',
length: 31,
fieldCount: 1,
fields: [
{
name: 'bigoid',
tableID: 3000000001,
columnID: 2,
dataTypeID: 3000000003,
dataTypeSize: 4,
dataTypeModifier: 5,
format: 'text',
},
],
};
const emptyParameterDescriptionBuffer = new buffer_list_1.default()
.addInt16(0) // number of parameters
.join(true, 't');
const oneParameterDescBuf = test_buffers_1.default.parameterDescription([1111]);
const twoParameterDescBuf = test_buffers_1.default.parameterDescription([2222, 3333]);
const expectedEmptyParameterDescriptionMessage = {
name: 'parameterDescription',
length: 6,
parameterCount: 0,
dataTypeIDs: [],
};
const expectedOneParameterMessage = {
name: 'parameterDescription',
length: 10,
parameterCount: 1,
dataTypeIDs: [1111],
};
const expectedTwoParameterMessage = {
name: 'parameterDescription',
length: 14,
parameterCount: 2,
dataTypeIDs: [2222, 3333],
};
// Register a mocha test asserting that parsing `buffer` yields a message
// whose properties match every key present in `expectedMessage`.
const testForMessage = function (buffer, expectedMessage) {
    it('receives and parses ' + expectedMessage.name, () => __awaiter(this, void 0, void 0, function* () {
        const messages = yield parseBuffers([buffer]);
        const [lastMessage] = messages;
        // only the keys listed in the expectation are compared
        for (const key in expectedMessage) {
            assert_1.default.deepEqual(lastMessage[key], expectedMessage[key]);
        }
    }));
};
const plainPasswordBuffer = test_buffers_1.default.authenticationCleartextPassword();
const md5PasswordBuffer = test_buffers_1.default.authenticationMD5Password();
const SASLBuffer = test_buffers_1.default.authenticationSASL();
const SASLContinueBuffer = test_buffers_1.default.authenticationSASLContinue();
const SASLFinalBuffer = test_buffers_1.default.authenticationSASLFinal();
const expectedPlainPasswordMessage = {
name: 'authenticationCleartextPassword',
};
const expectedMD5PasswordMessage = {
name: 'authenticationMD5Password',
salt: Buffer.from([1, 2, 3, 4]),
};
const expectedSASLMessage = {
name: 'authenticationSASL',
mechanisms: ['SCRAM-SHA-256'],
};
const expectedSASLContinueMessage = {
name: 'authenticationSASLContinue',
data: 'data',
};
const expectedSASLFinalMessage = {
name: 'authenticationSASLFinal',
data: 'data',
};
const notificationResponseBuffer = test_buffers_1.default.notification(4, 'hi', 'boom');
const expectedNotificationResponseMessage = {
name: 'notification',
processId: 4,
channel: 'hi',
payload: 'boom',
};
// Feed the given buffers through a PassThrough stream into the protocol
// parser and resolve with the array of parsed messages.
const parseBuffers = (buffers) => __awaiter(void 0, void 0, void 0, function* () {
    const stream = new stream_1.PassThrough();
    for (const buffer of buffers) {
        stream.write(buffer);
    }
    stream.end();
    const msgs = [];
    yield (0, _1.parse)(stream, (msg) => msgs.push(msg));
    return msgs;
});
describe('PgPacketStream', function () {
testForMessage(authOkBuffer, expectedAuthenticationOkayMessage);
testForMessage(plainPasswordBuffer, expectedPlainPasswordMessage);
testForMessage(md5PasswordBuffer, expectedMD5PasswordMessage);
testForMessage(SASLBuffer, expectedSASLMessage);
testForMessage(SASLContinueBuffer, expectedSASLContinueMessage);
// this exercises a found bug in the parser:
// https://github.com/brianc/node-postgres/pull/2210#issuecomment-627626084
// and adds a test which is deterministic, rather than relying on network packet chunking
const extendedSASLContinueBuffer = Buffer.concat([SASLContinueBuffer, Buffer.from([1, 2, 3, 4])]);
testForMessage(extendedSASLContinueBuffer, expectedSASLContinueMessage);
testForMessage(SASLFinalBuffer, expectedSASLFinalMessage);
// this exercises a found bug in the parser:
// https://github.com/brianc/node-postgres/pull/2210#issuecomment-627626084
// and adds a test which is deterministic, rather than relying on network packet chunking
const extendedSASLFinalBuffer = Buffer.concat([SASLFinalBuffer, Buffer.from([1, 2, 4, 5])]);
testForMessage(extendedSASLFinalBuffer, expectedSASLFinalMessage);
testForMessage(paramStatusBuffer, expectedParameterStatusMessage);
testForMessage(backendKeyDataBuffer, expectedBackendKeyDataMessage);
testForMessage(readyForQueryBuffer, expectedReadyForQueryMessage);
testForMessage(commandCompleteBuffer, expectedCommandCompleteMessage);
testForMessage(notificationResponseBuffer, expectedNotificationResponseMessage);
testForMessage(test_buffers_1.default.emptyQuery(), {
name: 'emptyQuery',
length: 4,
});
testForMessage(Buffer.from([0x6e, 0, 0, 0, 4]), {
name: 'noData',
});
describe('rowDescription messages', function () {
testForMessage(emptyRowDescriptionBuffer, expectedEmptyRowDescriptionMessage);
testForMessage(oneRowDescBuff, expectedOneRowMessage);
testForMessage(twoRowBuf, expectedTwoRowMessage);
testForMessage(bigOidDescBuff, expectedBigOidMessage);
});
describe('parameterDescription messages', function () {
testForMessage(emptyParameterDescriptionBuffer, expectedEmptyParameterDescriptionMessage);
testForMessage(oneParameterDescBuf, expectedOneParameterMessage);
testForMessage(twoParameterDescBuf, expectedTwoParameterMessage);
});
describe('parsing rows', function () {
describe('parsing empty row', function () {
testForMessage(emptyRowFieldBuf, {
name: 'dataRow',
fieldCount: 0,
});
});
describe('parsing data row with fields', function () {
testForMessage(oneFieldBuf, {
name: 'dataRow',
fieldCount: 1,
fields: ['test'],
});
});
});
describe('notice message', function () {
// this uses the same logic as error message
const buff = test_buffers_1.default.notice([{ type: 'C', value: 'code' }]);
testForMessage(buff, {
name: 'notice',
code: 'code',
});
});
testForMessage(test_buffers_1.default.error([]), {
name: 'error',
});
describe('with all the fields', function () {
const buffer = test_buffers_1.default.error([
{
type: 'S',
value: 'ERROR',
},
{
type: 'C',
value: 'code',
},
{
type: 'M',
value: 'message',
},
{
type: 'D',
value: 'details',
},
{
type: 'H',
value: 'hint',
},
{
type: 'P',
value: '100',
},
{
type: 'p',
value: '101',
},
{
type: 'q',
value: 'query',
},
{
type: 'W',
value: 'where',
},
{
type: 'F',
value: 'file',
},
{
type: 'L',
value: 'line',
},
{
type: 'R',
value: 'routine',
},
{
type: 'Z',
value: 'alsdkf',
},
]);
testForMessage(buffer, {
name: 'error',
severity: 'ERROR',
code: 'code',
message: 'message',
detail: 'details',
hint: 'hint',
position: '100',
internalPosition: '101',
internalQuery: 'query',
where: 'where',
file: 'file',
line: 'line',
routine: 'routine',
});
});
testForMessage(parseCompleteBuffer, {
name: 'parseComplete',
});
testForMessage(bindCompleteBuffer, {
name: 'bindComplete',
});
testForMessage(bindCompleteBuffer, {
name: 'bindComplete',
});
testForMessage(test_buffers_1.default.closeComplete(), {
name: 'closeComplete',
});
describe('parses portal suspended message', function () {
testForMessage(portalSuspendedBuffer, {
name: 'portalSuspended',
});
});
describe('parses replication start message', function () {
testForMessage(Buffer.from([0x57, 0x00, 0x00, 0x00, 0x04]), {
name: 'replicationStart',
length: 4,
});
});
describe('copy', () => {
testForMessage(test_buffers_1.default.copyIn(0), {
name: 'copyInResponse',
length: 7,
binary: false,
columnTypes: [],
});
testForMessage(test_buffers_1.default.copyIn(2), {
name: 'copyInResponse',
length: 11,
binary: false,
columnTypes: [0, 1],
});
testForMessage(test_buffers_1.default.copyOut(0), {
name: 'copyOutResponse',
length: 7,
binary: false,
columnTypes: [],
});
testForMessage(test_buffers_1.default.copyOut(3), {
name: 'copyOutResponse',
length: 13,
binary: false,
columnTypes: [0, 1, 2],
});
testForMessage(test_buffers_1.default.copyDone(), {
name: 'copyDone',
length: 4,
});
testForMessage(test_buffers_1.default.copyData(Buffer.from([5, 6, 7])), {
name: 'copyData',
length: 7,
chunk: Buffer.from([5, 6, 7]),
});
});
// since the data message on a stream can randomly divide the incomming
// tcp packets anywhere, we need to make sure we can parse every single
// split on a tcp message
describe('split buffer, single message parsing', function () {
    const fullBuffer = test_buffers_1.default.dataRow([null, 'bang', 'zug zug', null, '!']);
    // Shared assertions on the five fields of the row built above.
    const assertRowFields = (message) => {
        assert_1.default.equal(message.fields.length, 5);
        assert_1.default.equal(message.fields[0], null);
        assert_1.default.equal(message.fields[1], 'bang');
        assert_1.default.equal(message.fields[2], 'zug zug');
        assert_1.default.equal(message.fields[3], null);
        assert_1.default.equal(message.fields[4], '!');
    };
    it('parses when full buffer comes in', async function () {
        const messages = await parseBuffers([fullBuffer]);
        assertRowFields(messages[0]);
    });
    // Feed the message in two chunks, cut `split` bytes before its end,
    // and check the parser still reassembles a single complete row.
    const testMessageReceivedAfterSplitAt = async (split) => {
        const head = Buffer.alloc(fullBuffer.length - split);
        const tail = Buffer.alloc(fullBuffer.length - head.length);
        fullBuffer.copy(head, 0, 0);
        fullBuffer.copy(tail, 0, head.length);
        const messages = await parseBuffers([head, tail]);
        assertRowFields(messages[0]);
    };
    it('parses when split in the middle', function () {
        return testMessageReceivedAfterSplitAt(6);
    });
    it('parses when split at end', function () {
        return testMessageReceivedAfterSplitAt(2);
    });
    it('parses when split at beginning', function () {
        return Promise.all([
            testMessageReceivedAfterSplitAt(fullBuffer.length - 2),
            testMessageReceivedAfterSplitAt(fullBuffer.length - 1),
            testMessageReceivedAfterSplitAt(fullBuffer.length - 5),
        ]);
    });
});
describe('split buffer, multiple message parsing', function () {
    const dataRowBuffer = test_buffers_1.default.dataRow(['!']);
    const readyForQueryBuffer = test_buffers_1.default.readyForQuery();
    // One contiguous buffer holding both messages back to back.
    const fullBuffer = Buffer.alloc(dataRowBuffer.length + readyForQueryBuffer.length);
    dataRowBuffer.copy(fullBuffer, 0, 0);
    readyForQueryBuffer.copy(fullBuffer, dataRowBuffer.length, 0);
    const verifyMessages = (messages) => {
        assert_1.default.strictEqual(messages.length, 2);
        assert_1.default.deepEqual(messages[0], {
            name: 'dataRow',
            fieldCount: 1,
            length: 11,
            fields: ['!'],
        });
        assert_1.default.equal(messages[0].fields[0], '!');
        assert_1.default.deepEqual(messages[1], {
            name: 'readyForQuery',
            length: 5,
            status: 'I',
        });
    };
    // sanity check
    it('receives both messages when packet is not split', async function () {
        verifyMessages(await parseBuffers([fullBuffer]));
    });
    // Split the two-message buffer `split` bytes before its end and make
    // sure both messages still come out of the parser.
    const splitAndVerifyTwoMessages = async (split) => {
        const head = Buffer.alloc(fullBuffer.length - split);
        const tail = Buffer.alloc(fullBuffer.length - head.length);
        fullBuffer.copy(head, 0, 0);
        fullBuffer.copy(tail, 0, head.length);
        verifyMessages(await parseBuffers([head, tail]));
    };
    describe('receives both messages when packet is split', function () {
        it('in the middle', () => splitAndVerifyTwoMessages(11));
        it('at the front', () => Promise.all([
            splitAndVerifyTwoMessages(fullBuffer.length - 1),
            splitAndVerifyTwoMessages(fullBuffer.length - 4),
            splitAndVerifyTwoMessages(fullBuffer.length - 6),
        ]));
        it('at the end', () => Promise.all([splitAndVerifyTwoMessages(8), splitAndVerifyTwoMessages(1)]));
    });
});
});
//# sourceMappingURL=inbound-parser.test.js.map

File diff suppressed because one or more lines are too long

6
node_modules/pg-protocol/dist/index.d.ts generated vendored Normal file
View File

@ -0,0 +1,6 @@
/// <reference types="node" />
// Public surface of pg-protocol: the DatabaseError type, the message
// serializer, and a stream-driven parse() helper.
import { DatabaseError } from './messages';
import { serialize } from './serializer';
import { MessageCallback } from './parser';
// Parses backend messages from `stream`, invoking `callback` per message;
// the returned promise settles when the stream ends.
export declare function parse(stream: NodeJS.ReadableStream, callback: MessageCallback): Promise<void>;
export { serialize, DatabaseError };

15
node_modules/pg-protocol/dist/index.js generated vendored Normal file
View File

@ -0,0 +1,15 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.DatabaseError = exports.serialize = exports.parse = void 0;
const messages_1 = require("./messages");
Object.defineProperty(exports, "DatabaseError", { enumerable: true, get: function () { return messages_1.DatabaseError; } });
const serializer_1 = require("./serializer");
Object.defineProperty(exports, "serialize", { enumerable: true, get: function () { return serializer_1.serialize; } });
const parser_1 = require("./parser");
/**
 * Parses PostgreSQL backend messages from `stream`, invoking `callback`
 * once per decoded message.
 *
 * @param stream   readable stream carrying backend wire-protocol bytes
 * @param callback invoked for every parsed message
 * @returns promise that resolves when the stream ends, or rejects if the
 *          stream emits 'error'
 */
function parse(stream, callback) {
    const parser = new parser_1.Parser();
    stream.on('data', (buffer) => parser.parse(buffer, callback));
    return new Promise((resolve, reject) => {
        stream.on('end', () => resolve());
        // Bug fix: previously a stream 'error' left the returned promise
        // pending forever; reject so awaiting callers observe the failure.
        stream.on('error', reject);
    });
}
exports.parse = parse;
//# sourceMappingURL=index.js.map

1
node_modules/pg-protocol/dist/index.js.map generated vendored Normal file
View File

@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;AAAA,yCAA0C;AAUtB,8FAVX,wBAAa,OAUW;AATjC,6CAAwC;AAS/B,0FATA,sBAAS,OASA;AARlB,qCAAkD;AAElD,SAAgB,KAAK,CAAC,MAA6B,EAAE,QAAyB;IAC5E,MAAM,MAAM,GAAG,IAAI,eAAM,EAAE,CAAA;IAC3B,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,MAAc,EAAE,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAA;IACrE,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE,CAAC,OAAO,EAAE,CAAC,CAAC,CAAA;AACpE,CAAC;AAJD,sBAIC"}

162
node_modules/pg-protocol/dist/messages.d.ts generated vendored Normal file
View File

@ -0,0 +1,162 @@
/// <reference types="node" />
// Format in which result data is transferred on the wire.
export declare type Mode = 'text' | 'binary';
// Discriminant string carried by every parsed backend message.
export declare type MessageName = 'parseComplete' | 'bindComplete' | 'closeComplete' | 'noData' | 'portalSuspended' | 'replicationStart' | 'emptyQuery' | 'copyDone' | 'copyData' | 'rowDescription' | 'parameterDescription' | 'parameterStatus' | 'backendKeyData' | 'notification' | 'readyForQuery' | 'commandComplete' | 'dataRow' | 'copyInResponse' | 'copyOutResponse' | 'authenticationOk' | 'authenticationMD5Password' | 'authenticationCleartextPassword' | 'authenticationSASL' | 'authenticationSASLContinue' | 'authenticationSASLFinal' | 'error' | 'notice';
// Minimal shape shared by all backend messages: a name tag plus the
// message length reported by the parser.
export interface BackendMessage {
    name: MessageName;
    length: number;
}
export declare const parseComplete: BackendMessage;
export declare const bindComplete: BackendMessage;
export declare const closeComplete: BackendMessage;
export declare const noData: BackendMessage;
export declare const portalSuspended: BackendMessage;
export declare const replicationStart: BackendMessage;
export declare const emptyQuery: BackendMessage;
export declare const copyDone: BackendMessage;
interface NoticeOrError {
message: string | undefined;
severity: string | undefined;
code: string | undefined;
detail: string | undefined;
hint: string | undefined;
position: string | undefined;
internalPosition: string | undefined;
internalQuery: string | undefined;
where: string | undefined;
schema: string | undefined;
table: string | undefined;
column: string | undefined;
dataType: string | undefined;
constraint: string | undefined;
file: string | undefined;
line: string | undefined;
routine: string | undefined;
}
export declare class DatabaseError extends Error implements NoticeOrError {
readonly length: number;
readonly name: MessageName;
severity: string | undefined;
code: string | undefined;
detail: string | undefined;
hint: string | undefined;
position: string | undefined;
internalPosition: string | undefined;
internalQuery: string | undefined;
where: string | undefined;
schema: string | undefined;
table: string | undefined;
column: string | undefined;
dataType: string | undefined;
constraint: string | undefined;
file: string | undefined;
line: string | undefined;
routine: string | undefined;
constructor(message: string, length: number, name: MessageName);
}
export declare class CopyDataMessage {
readonly length: number;
readonly chunk: Buffer;
readonly name = "copyData";
constructor(length: number, chunk: Buffer);
}
export declare class CopyResponse {
readonly length: number;
readonly name: MessageName;
readonly binary: boolean;
readonly columnTypes: number[];
constructor(length: number, name: MessageName, binary: boolean, columnCount: number);
}
export declare class Field {
readonly name: string;
readonly tableID: number;
readonly columnID: number;
readonly dataTypeID: number;
readonly dataTypeSize: number;
readonly dataTypeModifier: number;
readonly format: Mode;
constructor(name: string, tableID: number, columnID: number, dataTypeID: number, dataTypeSize: number, dataTypeModifier: number, format: Mode);
}
export declare class RowDescriptionMessage {
readonly length: number;
readonly fieldCount: number;
readonly name: MessageName;
readonly fields: Field[];
constructor(length: number, fieldCount: number);
}
export declare class ParameterDescriptionMessage {
readonly length: number;
readonly parameterCount: number;
readonly name: MessageName;
readonly dataTypeIDs: number[];
constructor(length: number, parameterCount: number);
}
export declare class ParameterStatusMessage {
readonly length: number;
readonly parameterName: string;
readonly parameterValue: string;
readonly name: MessageName;
constructor(length: number, parameterName: string, parameterValue: string);
}
export declare class AuthenticationMD5Password implements BackendMessage {
readonly length: number;
readonly salt: Buffer;
readonly name: MessageName;
constructor(length: number, salt: Buffer);
}
export declare class BackendKeyDataMessage {
readonly length: number;
readonly processID: number;
readonly secretKey: number;
readonly name: MessageName;
constructor(length: number, processID: number, secretKey: number);
}
export declare class NotificationResponseMessage {
readonly length: number;
readonly processId: number;
readonly channel: string;
readonly payload: string;
readonly name: MessageName;
constructor(length: number, processId: number, channel: string, payload: string);
}
export declare class ReadyForQueryMessage {
readonly length: number;
readonly status: string;
readonly name: MessageName;
constructor(length: number, status: string);
}
export declare class CommandCompleteMessage {
readonly length: number;
readonly text: string;
readonly name: MessageName;
constructor(length: number, text: string);
}
export declare class DataRowMessage {
length: number;
fields: any[];
readonly fieldCount: number;
readonly name: MessageName;
constructor(length: number, fields: any[]);
}
export declare class NoticeMessage implements BackendMessage, NoticeOrError {
readonly length: number;
readonly message: string | undefined;
constructor(length: number, message: string | undefined);
readonly name = "notice";
severity: string | undefined;
code: string | undefined;
detail: string | undefined;
hint: string | undefined;
position: string | undefined;
internalPosition: string | undefined;
internalQuery: string | undefined;
where: string | undefined;
schema: string | undefined;
table: string | undefined;
column: string | undefined;
dataType: string | undefined;
constraint: string | undefined;
file: string | undefined;
line: string | undefined;
routine: string | undefined;
}
export {};

160
node_modules/pg-protocol/dist/messages.js generated vendored Normal file
View File

@ -0,0 +1,160 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.NoticeMessage = exports.DataRowMessage = exports.CommandCompleteMessage = exports.ReadyForQueryMessage = exports.NotificationResponseMessage = exports.BackendKeyDataMessage = exports.AuthenticationMD5Password = exports.ParameterStatusMessage = exports.ParameterDescriptionMessage = exports.RowDescriptionMessage = exports.Field = exports.CopyResponse = exports.CopyDataMessage = exports.DatabaseError = exports.copyDone = exports.emptyQuery = exports.replicationStart = exports.portalSuspended = exports.noData = exports.closeComplete = exports.bindComplete = exports.parseComplete = void 0;
// Payload-free backend messages: these responses carry nothing beyond
// their code + length header, so the parser returns one shared singleton
// instance per message type instead of allocating a new object each time.
exports.parseComplete = {
    name: 'parseComplete',
    length: 5,
};
exports.bindComplete = {
    name: 'bindComplete',
    length: 5,
};
exports.closeComplete = {
    name: 'closeComplete',
    length: 5,
};
exports.noData = {
    name: 'noData',
    length: 5,
};
exports.portalSuspended = {
    name: 'portalSuspended',
    length: 5,
};
exports.replicationStart = {
    name: 'replicationStart',
    length: 4,
};
exports.emptyQuery = {
    name: 'emptyQuery',
    length: 4,
};
exports.copyDone = {
    name: 'copyDone',
    length: 4,
};
/**
 * Error raised for a backend ErrorResponse; carries the wire message's
 * length and its MessageName ('error' or 'notice') alongside the text.
 */
class DatabaseError extends Error {
    constructor(message, length, name) {
        super(message);
        Object.assign(this, { length, name });
    }
}
exports.DatabaseError = DatabaseError;
/** One chunk of COPY data received from the backend. */
class CopyDataMessage {
    constructor(length, chunk) {
        Object.assign(this, { length, chunk, name: 'copyData' });
    }
}
exports.CopyDataMessage = CopyDataMessage;
/**
 * CopyInResponse/CopyOutResponse: `columnTypes` is pre-sized to the
 * column count and filled in later by the parser.
 */
class CopyResponse {
    constructor(length, name, binary, columnCount) {
        Object.assign(this, { length, name, binary, columnTypes: new Array(columnCount) });
    }
}
exports.CopyResponse = CopyResponse;
/** Metadata for one column in a RowDescription message. */
class Field {
    constructor(name, tableID, columnID, dataTypeID, dataTypeSize, dataTypeModifier, format) {
        Object.assign(this, {
            name,
            tableID,
            columnID,
            dataTypeID,
            dataTypeSize,
            dataTypeModifier,
            format,
        });
    }
}
exports.Field = Field;
/** RowDescription: `fields` is pre-sized and populated by the parser. */
class RowDescriptionMessage {
    constructor(length, fieldCount) {
        Object.assign(this, {
            length,
            fieldCount,
            name: 'rowDescription',
            fields: new Array(fieldCount),
        });
    }
}
exports.RowDescriptionMessage = RowDescriptionMessage;
/** ParameterDescription: `dataTypeIDs` is pre-sized and filled by the parser. */
class ParameterDescriptionMessage {
    constructor(length, parameterCount) {
        Object.assign(this, {
            length,
            parameterCount,
            name: 'parameterDescription',
            dataTypeIDs: new Array(parameterCount),
        });
    }
}
exports.ParameterDescriptionMessage = ParameterDescriptionMessage;
/** ParameterStatus: a single runtime parameter name/value pair. */
class ParameterStatusMessage {
    constructor(length, parameterName, parameterValue) {
        Object.assign(this, { length, parameterName, parameterValue, name: 'parameterStatus' });
    }
}
exports.ParameterStatusMessage = ParameterStatusMessage;
/** AuthenticationMD5Password: carries the salt to hash the password with. */
class AuthenticationMD5Password {
    constructor(length, salt) {
        Object.assign(this, { length, salt, name: 'authenticationMD5Password' });
    }
}
exports.AuthenticationMD5Password = AuthenticationMD5Password;
/** BackendKeyData: process id + secret key used for query cancellation. */
class BackendKeyDataMessage {
    constructor(length, processID, secretKey) {
        Object.assign(this, { length, processID, secretKey, name: 'backendKeyData' });
    }
}
exports.BackendKeyDataMessage = BackendKeyDataMessage;
/** NotificationResponse: a LISTEN/NOTIFY event with channel and payload. */
class NotificationResponseMessage {
    constructor(length, processId, channel, payload) {
        Object.assign(this, { length, processId, channel, payload, name: 'notification' });
    }
}
exports.NotificationResponseMessage = NotificationResponseMessage;
/** ReadyForQuery: `status` is the backend transaction-status byte. */
class ReadyForQueryMessage {
    constructor(length, status) {
        Object.assign(this, { length, status, name: 'readyForQuery' });
    }
}
exports.ReadyForQueryMessage = ReadyForQueryMessage;
/** CommandComplete: `text` is the command tag reported by the backend. */
class CommandCompleteMessage {
    constructor(length, text) {
        Object.assign(this, { length, text, name: 'commandComplete' });
    }
}
exports.CommandCompleteMessage = CommandCompleteMessage;
/** DataRow: one result row; `fieldCount` is derived from the fields array. */
class DataRowMessage {
    constructor(length, fields) {
        Object.assign(this, {
            length,
            fields,
            name: 'dataRow',
            fieldCount: fields.length,
        });
    }
}
exports.DataRowMessage = DataRowMessage;
/** NoticeResponse: non-fatal diagnostic text from the backend. */
class NoticeMessage {
    constructor(length, message) {
        Object.assign(this, { length, message, name: 'notice' });
    }
}
exports.NoticeMessage = NoticeMessage;
//# sourceMappingURL=messages.js.map

1
node_modules/pg-protocol/dist/messages.js.map generated vendored Normal file
View File

@ -0,0 +1 @@
{"version":3,"file":"messages.js","sourceRoot":"","sources":["../src/messages.ts"],"names":[],"mappings":";;;AAoCa,QAAA,aAAa,GAAmB;IAC3C,IAAI,EAAE,eAAe;IACrB,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,YAAY,GAAmB;IAC1C,IAAI,EAAE,cAAc;IACpB,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,aAAa,GAAmB;IAC3C,IAAI,EAAE,eAAe;IACrB,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,MAAM,GAAmB;IACpC,IAAI,EAAE,QAAQ;IACd,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,eAAe,GAAmB;IAC7C,IAAI,EAAE,iBAAiB;IACvB,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,gBAAgB,GAAmB;IAC9C,IAAI,EAAE,kBAAkB;IACxB,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,UAAU,GAAmB;IACxC,IAAI,EAAE,YAAY;IAClB,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,QAAQ,GAAmB;IACtC,IAAI,EAAE,UAAU;IAChB,MAAM,EAAE,CAAC;CACV,CAAA;AAsBD,MAAa,aAAc,SAAQ,KAAK;IAiBtC,YACE,OAAe,EACC,MAAc,EACd,IAAiB;QAEjC,KAAK,CAAC,OAAO,CAAC,CAAA;QAHE,WAAM,GAAN,MAAM,CAAQ;QACd,SAAI,GAAJ,IAAI,CAAa;IAGnC,CAAC;CACF;AAxBD,sCAwBC;AAED,MAAa,eAAe;IAE1B,YACkB,MAAc,EACd,KAAa;QADb,WAAM,GAAN,MAAM,CAAQ;QACd,UAAK,GAAL,KAAK,CAAQ;QAHf,SAAI,GAAG,UAAU,CAAA;IAI9B,CAAC;CACL;AAND,0CAMC;AAED,MAAa,YAAY;IAEvB,YACkB,MAAc,EACd,IAAiB,EACjB,MAAe,EAC/B,WAAmB;QAHH,WAAM,GAAN,MAAM,CAAQ;QACd,SAAI,GAAJ,IAAI,CAAa;QACjB,WAAM,GAAN,MAAM,CAAS;QAG/B,IAAI,CAAC,WAAW,GAAG,IAAI,KAAK,CAAC,WAAW,CAAC,CAAA;IAC3C,CAAC;CACF;AAVD,oCAUC;AAED,MAAa,KAAK;IAChB,YACkB,IAAY,EACZ,OAAe,EACf,QAAgB,EAChB,UAAkB,EAClB,YAAoB,EACpB,gBAAwB,EACxB,MAAY;QANZ,SAAI,GAAJ,IAAI,CAAQ;QACZ,YAAO,GAAP,OAAO,CAAQ;QACf,aAAQ,GAAR,QAAQ,CAAQ;QAChB,eAAU,GAAV,UAAU,CAAQ;QAClB,iBAAY,GAAZ,YAAY,CAAQ;QACpB,qBAAgB,GAAhB,gBAAgB,CAAQ;QACxB,WAAM,GAAN,MAAM,CAAM;IAC3B,CAAC;CACL;AAVD,sBAUC;AAED,MAAa,qBAAqB;IAGhC,YACkB,MAAc,EACd,UAAkB;QADlB,WAAM,GAAN,MAAM,CAAQ;QACd,eAAU,GAAV,UAAU,CAAQ;QAJpB,SAAI,GAAgB,gBAAgB,CAAA;QAMlD,IAAI,CAAC,MAAM,GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,UAAU,CAAC,CAAA;IAC1C,CAAC;CACF;AATD,sDASC;AAED,MAAa,2BAA2B;IAGtC,YACkB,MAAc,EACd,cAAsB;QADtB,WAAM,GAAN,MAAM,CAAQ;QACd,mBAAc,GAAd,cAAc,CAAQ;QAJxB,SAAI,GAAgB,sBAAsB,CAAA;QAMxD,IAAI,CAAC,WAAW,GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,cAAc,CAAC,CAAA;IACnD,CAAC;CACF;AATD,kEASC;AAED,MAA
a,sBAAsB;IAEjC,YACkB,MAAc,EACd,aAAqB,EACrB,cAAsB;QAFtB,WAAM,GAAN,MAAM,CAAQ;QACd,kBAAa,GAAb,aAAa,CAAQ;QACrB,mBAAc,GAAd,cAAc,CAAQ;QAJxB,SAAI,GAAgB,iBAAiB,CAAA;IAKlD,CAAC;CACL;AAPD,wDAOC;AAED,MAAa,yBAAyB;IAEpC,YACkB,MAAc,EACd,IAAY;QADZ,WAAM,GAAN,MAAM,CAAQ;QACd,SAAI,GAAJ,IAAI,CAAQ;QAHd,SAAI,GAAgB,2BAA2B,CAAA;IAI5D,CAAC;CACL;AAND,8DAMC;AAED,MAAa,qBAAqB;IAEhC,YACkB,MAAc,EACd,SAAiB,EACjB,SAAiB;QAFjB,WAAM,GAAN,MAAM,CAAQ;QACd,cAAS,GAAT,SAAS,CAAQ;QACjB,cAAS,GAAT,SAAS,CAAQ;QAJnB,SAAI,GAAgB,gBAAgB,CAAA;IAKjD,CAAC;CACL;AAPD,sDAOC;AAED,MAAa,2BAA2B;IAEtC,YACkB,MAAc,EACd,SAAiB,EACjB,OAAe,EACf,OAAe;QAHf,WAAM,GAAN,MAAM,CAAQ;QACd,cAAS,GAAT,SAAS,CAAQ;QACjB,YAAO,GAAP,OAAO,CAAQ;QACf,YAAO,GAAP,OAAO,CAAQ;QALjB,SAAI,GAAgB,cAAc,CAAA;IAM/C,CAAC;CACL;AARD,kEAQC;AAED,MAAa,oBAAoB;IAE/B,YACkB,MAAc,EACd,MAAc;QADd,WAAM,GAAN,MAAM,CAAQ;QACd,WAAM,GAAN,MAAM,CAAQ;QAHhB,SAAI,GAAgB,eAAe,CAAA;IAIhD,CAAC;CACL;AAND,oDAMC;AAED,MAAa,sBAAsB;IAEjC,YACkB,MAAc,EACd,IAAY;QADZ,WAAM,GAAN,MAAM,CAAQ;QACd,SAAI,GAAJ,IAAI,CAAQ;QAHd,SAAI,GAAgB,iBAAiB,CAAA;IAIlD,CAAC;CACL;AAND,wDAMC;AAED,MAAa,cAAc;IAGzB,YACS,MAAc,EACd,MAAa;QADb,WAAM,GAAN,MAAM,CAAQ;QACd,WAAM,GAAN,MAAM,CAAO;QAHN,SAAI,GAAgB,SAAS,CAAA;QAK3C,IAAI,CAAC,UAAU,GAAG,MAAM,CAAC,MAAM,CAAA;IACjC,CAAC;CACF;AATD,wCASC;AAED,MAAa,aAAa;IACxB,YACkB,MAAc,EACd,OAA2B;QAD3B,WAAM,GAAN,MAAM,CAAQ;QACd,YAAO,GAAP,OAAO,CAAoB;QAE7B,SAAI,GAAG,QAAQ,CAAA;IAD5B,CAAC;CAkBL;AAtBD,sCAsBC"}

View File

@ -0,0 +1 @@
export {};

View File

@ -0,0 +1,252 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const assert_1 = __importDefault(require("assert"));
const serializer_1 = require("./serializer");
const buffer_list_1 = __importDefault(require("./testing/buffer-list"));
describe('serializer', () => {
    it('builds startup message', () => {
        const result = serializer_1.serialize.startup({
            user: 'brian',
            database: 'bang',
        });
        const wanted = new buffer_list_1.default()
            .addInt16(3)
            .addInt16(0)
            .addCString('user')
            .addCString('brian')
            .addCString('database')
            .addCString('bang')
            .addCString('client_encoding')
            .addCString('UTF8')
            .addCString('')
            .join(true);
        assert_1.default.deepEqual(result, wanted);
    });
    it('builds password message', () => {
        const result = serializer_1.serialize.password('!');
        const wanted = new buffer_list_1.default().addCString('!').join(true, 'p');
        assert_1.default.deepEqual(result, wanted);
    });
    it('builds request ssl message', () => {
        const result = serializer_1.serialize.requestSsl();
        const wanted = new buffer_list_1.default().addInt32(80877103).join(true);
        assert_1.default.deepEqual(result, wanted);
    });
    it('builds SASLInitialResponseMessage message', () => {
        const result = serializer_1.serialize.sendSASLInitialResponseMessage('mech', 'data');
        const wanted = new buffer_list_1.default().addCString('mech').addInt32(4).addString('data').join(true, 'p');
        assert_1.default.deepEqual(result, wanted);
    });
    it('builds SCRAMClientFinalMessage message', () => {
        const result = serializer_1.serialize.sendSCRAMClientFinalMessage('data');
        const wanted = new buffer_list_1.default().addString('data').join(true, 'p');
        assert_1.default.deepEqual(result, wanted);
    });
    it('builds query message', () => {
        const sql = 'select * from boom';
        const result = serializer_1.serialize.query(sql);
        const wanted = new buffer_list_1.default().addCString(sql).join(true, 'Q');
        assert_1.default.deepEqual(result, wanted);
    });
    describe('parse message', () => {
        it('builds parse message', () => {
            const result = serializer_1.serialize.parse({ text: '!' });
            const wanted = new buffer_list_1.default().addCString('').addCString('!').addInt16(0).join(true, 'P');
            assert_1.default.deepEqual(result, wanted);
        });
        it('builds parse message with named query', () => {
            const result = serializer_1.serialize.parse({
                name: 'boom',
                text: 'select * from boom',
                types: [],
            });
            const wanted = new buffer_list_1.default().addCString('boom').addCString('select * from boom').addInt16(0).join(true, 'P');
            assert_1.default.deepEqual(result, wanted);
        });
        it('with multiple parameters', () => {
            const result = serializer_1.serialize.parse({
                name: 'force',
                text: 'select * from bang where name = $1',
                types: [1, 2, 3, 4],
            });
            const wanted = new buffer_list_1.default()
                .addCString('force')
                .addCString('select * from bang where name = $1')
                .addInt16(4)
                .addInt32(1)
                .addInt32(2)
                .addInt32(3)
                .addInt32(4)
                .join(true, 'P');
            assert_1.default.deepEqual(result, wanted);
        });
    });
    describe('bind messages', () => {
        it('with no values', () => {
            const result = serializer_1.serialize.bind();
            const wanted = new buffer_list_1.default()
                .addCString('') // portal name
                .addCString('') // statement name
                .addInt16(0)
                .addInt16(0)
                .addInt16(1)
                .addInt16(0)
                .join(true, 'B');
            assert_1.default.deepEqual(result, wanted);
        });
        it('with named statement, portal, and values', () => {
            const result = serializer_1.serialize.bind({
                portal: 'bang',
                statement: 'woo',
                values: ['1', 'hi', null, 'zing'],
            });
            const wanted = new buffer_list_1.default()
                .addCString('bang') // portal name
                .addCString('woo') // statement name
                .addInt16(4)
                .addInt16(0)
                .addInt16(0)
                .addInt16(0)
                .addInt16(0)
                .addInt16(4)
                .addInt32(1)
                .add(Buffer.from('1'))
                .addInt32(2)
                .add(Buffer.from('hi'))
                .addInt32(-1)
                .addInt32(4)
                .add(Buffer.from('zing'))
                .addInt16(1)
                .addInt16(0)
                .join(true, 'B');
            assert_1.default.deepEqual(result, wanted);
        });
    });
    it('with custom valueMapper', () => {
        const result = serializer_1.serialize.bind({
            portal: 'bang',
            statement: 'woo',
            values: ['1', 'hi', null, 'zing'],
            valueMapper: () => null,
        });
        const wanted = new buffer_list_1.default()
            .addCString('bang') // portal name
            .addCString('woo') // statement name
            .addInt16(4)
            .addInt16(0)
            .addInt16(0)
            .addInt16(0)
            .addInt16(0)
            .addInt16(4)
            .addInt32(-1)
            .addInt32(-1)
            .addInt32(-1)
            .addInt32(-1)
            .addInt16(1)
            .addInt16(0)
            .join(true, 'B');
        assert_1.default.deepEqual(result, wanted);
    });
    it('with named statement, portal, and buffer value', () => {
        const result = serializer_1.serialize.bind({
            portal: 'bang',
            statement: 'woo',
            values: ['1', 'hi', null, Buffer.from('zing', 'utf8')],
        });
        const wanted = new buffer_list_1.default()
            .addCString('bang') // portal name
            .addCString('woo') // statement name
            .addInt16(4) // value count
            .addInt16(0) // string
            .addInt16(0) // string
            .addInt16(0) // string
            .addInt16(1) // binary
            .addInt16(4)
            .addInt32(1)
            .add(Buffer.from('1'))
            .addInt32(2)
            .add(Buffer.from('hi'))
            .addInt32(-1)
            .addInt32(4)
            .add(Buffer.from('zing', 'utf-8'))
            .addInt16(1)
            .addInt16(0)
            .join(true, 'B');
        assert_1.default.deepEqual(result, wanted);
    });
    describe('builds execute message', () => {
        it('for unamed portal with no row limit', () => {
            const result = serializer_1.serialize.execute();
            const wanted = new buffer_list_1.default().addCString('').addInt32(0).join(true, 'E');
            assert_1.default.deepEqual(result, wanted);
        });
        it('for named portal with row limit', () => {
            const result = serializer_1.serialize.execute({
                portal: 'my favorite portal',
                rows: 100,
            });
            const wanted = new buffer_list_1.default().addCString('my favorite portal').addInt32(100).join(true, 'E');
            assert_1.default.deepEqual(result, wanted);
        });
    });
    it('builds flush command', () => {
        const result = serializer_1.serialize.flush();
        const wanted = new buffer_list_1.default().join(true, 'H');
        assert_1.default.deepEqual(result, wanted);
    });
    it('builds sync command', () => {
        const result = serializer_1.serialize.sync();
        const wanted = new buffer_list_1.default().join(true, 'S');
        assert_1.default.deepEqual(result, wanted);
    });
    it('builds end command', () => {
        const result = serializer_1.serialize.end();
        const wanted = Buffer.from([0x58, 0, 0, 0, 4]);
        assert_1.default.deepEqual(result, wanted);
    });
    describe('builds describe command', () => {
        it('describe statement', () => {
            const result = serializer_1.serialize.describe({ type: 'S', name: 'bang' });
            const wanted = new buffer_list_1.default().addChar('S').addCString('bang').join(true, 'D');
            assert_1.default.deepEqual(result, wanted);
        });
        it('describe unnamed portal', () => {
            const result = serializer_1.serialize.describe({ type: 'P' });
            const wanted = new buffer_list_1.default().addChar('P').addCString('').join(true, 'D');
            assert_1.default.deepEqual(result, wanted);
        });
    });
    describe('builds close command', () => {
        it('describe statement', () => {
            const result = serializer_1.serialize.close({ type: 'S', name: 'bang' });
            const wanted = new buffer_list_1.default().addChar('S').addCString('bang').join(true, 'C');
            assert_1.default.deepEqual(result, wanted);
        });
        it('describe unnamed portal', () => {
            const result = serializer_1.serialize.close({ type: 'P' });
            const wanted = new buffer_list_1.default().addChar('P').addCString('').join(true, 'C');
            assert_1.default.deepEqual(result, wanted);
        });
    });
    describe('copy messages', () => {
        it('builds copyFromChunk', () => {
            const result = serializer_1.serialize.copyData(Buffer.from([1, 2, 3]));
            const wanted = new buffer_list_1.default().add(Buffer.from([1, 2, 3])).join(true, 'd');
            assert_1.default.deepEqual(result, wanted);
        });
        it('builds copy fail', () => {
            const result = serializer_1.serialize.copyFail('err!');
            const wanted = new buffer_list_1.default().addCString('err!').join(true, 'f');
            assert_1.default.deepEqual(result, wanted);
        });
        it('builds copy done', () => {
            const result = serializer_1.serialize.copyDone();
            const wanted = new buffer_list_1.default().join(true, 'c');
            assert_1.default.deepEqual(result, wanted);
        });
    });
    it('builds cancel message', () => {
        const result = serializer_1.serialize.cancel(3, 4);
        const wanted = new buffer_list_1.default().addInt16(1234).addInt16(5678).addInt32(3).addInt32(4).join(true);
        assert_1.default.deepEqual(result, wanted);
    });
});
//# sourceMappingURL=outbound-serializer.test.js.map

File diff suppressed because one or more lines are too long

39
node_modules/pg-protocol/dist/parser.d.ts generated vendored Normal file
View File

@ -0,0 +1,39 @@
/// <reference types="node" />
/// <reference types="node" />
import { TransformOptions } from 'stream';
import { Mode, BackendMessage } from './messages';
export declare type Packet = {
code: number;
packet: Buffer;
};
declare type StreamOptions = TransformOptions & {
mode: Mode;
};
export declare type MessageCallback = (msg: BackendMessage) => void;
export declare class Parser {
private buffer;
private bufferLength;
private bufferOffset;
private reader;
private mode;
constructor(opts?: StreamOptions);
parse(buffer: Buffer, callback: MessageCallback): void;
private mergeBuffer;
private handlePacket;
private parseReadyForQueryMessage;
private parseCommandCompleteMessage;
private parseCopyData;
private parseCopyInMessage;
private parseCopyOutMessage;
private parseCopyMessage;
private parseNotificationMessage;
private parseRowDescriptionMessage;
private parseField;
private parseParameterDescriptionMessage;
private parseDataRowMessage;
private parseParameterStatusMessage;
private parseBackendKeyData;
parseAuthenticationResponse(offset: number, length: number, bytes: Buffer): any;
private parseErrorMessage;
}
export {};

306
node_modules/pg-protocol/dist/parser.js generated vendored Normal file
View File

@ -0,0 +1,306 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Parser = void 0;
const messages_1 = require("./messages");
const buffer_reader_1 = require("./buffer-reader");
// every message is prefixed with a single byte
const CODE_LENGTH = 1;
// every message has an int32 length which includes itself but does
// NOT include the code in the length
const LEN_LENGTH = 4;
const HEADER_LENGTH = CODE_LENGTH + LEN_LENGTH;
const emptyBuffer = Buffer.allocUnsafe(0);
class Parser {
/**
 * @param opts optional stream options; only `mode` is inspected here.
 *             'binary' is rejected, anything else falls back to 'text'.
 */
constructor(opts) {
    // Empty parse state: no partial-message bytes retained yet.
    this.buffer = emptyBuffer;
    this.bufferLength = 0;
    this.bufferOffset = 0;
    this.reader = new buffer_reader_1.BufferReader();
    if ((opts === null || opts === void 0 ? void 0 : opts.mode) === 'binary') {
        throw new Error('Binary mode not supported yet');
    }
    this.mode = (opts === null || opts === void 0 ? void 0 : opts.mode) || 'text';
}
/**
 * Feeds one chunk of stream data into the parser, invoking `callback`
 * for every complete message it contains. Bytes belonging to a message
 * whose tail has not arrived yet are retained for the next call.
 */
parse(buffer, callback) {
    // Append the new bytes to any partial message kept from the last call.
    this.mergeBuffer(buffer);
    const bufferFullLength = this.bufferOffset + this.bufferLength;
    let offset = this.bufferOffset;
    // A message header is 5 bytes: 1-byte code + 4-byte big-endian length.
    while (offset + HEADER_LENGTH <= bufferFullLength) {
        // code is 1 byte long - it identifies the message type
        const code = this.buffer[offset];
        // length is 1 Uint32BE - it is the length of the message EXCLUDING the code
        const length = this.buffer.readUInt32BE(offset + CODE_LENGTH);
        const fullMessageLength = CODE_LENGTH + length;
        if (fullMessageLength + offset <= bufferFullLength) {
            // Whole message is buffered: decode and hand it to the caller.
            const message = this.handlePacket(offset + HEADER_LENGTH, code, length, this.buffer);
            callback(message);
            offset += fullMessageLength;
        }
        else {
            // Message only partially buffered; wait for more data.
            break;
        }
    }
    if (offset === bufferFullLength) {
        // No more use for the buffer
        this.buffer = emptyBuffer;
        this.bufferLength = 0;
        this.bufferOffset = 0;
    }
    else {
        // Adjust the cursors of remainingBuffer
        this.bufferLength = bufferFullLength - offset;
        this.bufferOffset = offset;
    }
}
/**
 * Appends `buffer` to any bytes retained from the previous parse() call.
 * With no retained bytes the incoming buffer is aliased directly (no copy);
 * otherwise the internal buffer is compacted or grown (doubling) as needed.
 */
mergeBuffer(buffer) {
    if (this.bufferLength > 0) {
        const newLength = this.bufferLength + buffer.byteLength;
        const newFullLength = newLength + this.bufferOffset;
        if (newFullLength > this.buffer.byteLength) {
            // We can't concat the new buffer with the remaining one
            let newBuffer;
            if (newLength <= this.buffer.byteLength && this.bufferOffset >= this.bufferLength) {
                // We can move the relevant part to the beginning of the buffer instead of allocating a new buffer
                newBuffer = this.buffer;
            }
            else {
                // Allocate a new larger buffer
                let newBufferLength = this.buffer.byteLength * 2;
                while (newLength >= newBufferLength) {
                    newBufferLength *= 2;
                }
                newBuffer = Buffer.allocUnsafe(newBufferLength);
            }
            // Move the remaining buffer to the new one
            this.buffer.copy(newBuffer, 0, this.bufferOffset, this.bufferOffset + this.bufferLength);
            this.buffer = newBuffer;
            this.bufferOffset = 0;
        }
        // Concat the new buffer with the remaining one
        buffer.copy(this.buffer, this.bufferOffset + this.bufferLength);
        this.bufferLength = newLength;
    }
    else {
        // Nothing retained: alias the incoming chunk directly.
        this.buffer = buffer;
        this.bufferOffset = 0;
        this.bufferLength = buffer.byteLength;
    }
}
    /**
     * Dispatch one complete message to its specific parser.
     * `offset` points at the first payload byte (after the 5-byte header);
     * `length` is the header's length field (excludes the code byte).
     * Payload-less message types return shared, pre-built message objects.
     * Unknown codes produce a DatabaseError message rather than throwing,
     * so the parse loop keeps running.
     */
    handlePacket(offset, code, length, bytes) {
        switch (code) {
            case 50 /* MessageCodes.BindComplete */:
                return messages_1.bindComplete;
            case 49 /* MessageCodes.ParseComplete */:
                return messages_1.parseComplete;
            case 51 /* MessageCodes.CloseComplete */:
                return messages_1.closeComplete;
            case 110 /* MessageCodes.NoData */:
                return messages_1.noData;
            case 115 /* MessageCodes.PortalSuspended */:
                return messages_1.portalSuspended;
            case 99 /* MessageCodes.CopyDone */:
                return messages_1.copyDone;
            case 87 /* MessageCodes.ReplicationStart */:
                return messages_1.replicationStart;
            case 73 /* MessageCodes.EmptyQuery */:
                return messages_1.emptyQuery;
            case 68 /* MessageCodes.DataRow */:
                return this.parseDataRowMessage(offset, length, bytes);
            case 67 /* MessageCodes.CommandComplete */:
                return this.parseCommandCompleteMessage(offset, length, bytes);
            case 90 /* MessageCodes.ReadyForQuery */:
                return this.parseReadyForQueryMessage(offset, length, bytes);
            case 65 /* MessageCodes.NotificationResponse */:
                return this.parseNotificationMessage(offset, length, bytes);
            case 82 /* MessageCodes.AuthenticationResponse */:
                return this.parseAuthenticationResponse(offset, length, bytes);
            case 83 /* MessageCodes.ParameterStatus */:
                return this.parseParameterStatusMessage(offset, length, bytes);
            case 75 /* MessageCodes.BackendKeyData */:
                return this.parseBackendKeyData(offset, length, bytes);
            case 69 /* MessageCodes.ErrorMessage */:
                return this.parseErrorMessage(offset, length, bytes, 'error');
            case 78 /* MessageCodes.NoticeMessage */:
                return this.parseErrorMessage(offset, length, bytes, 'notice');
            case 84 /* MessageCodes.RowDescriptionMessage */:
                return this.parseRowDescriptionMessage(offset, length, bytes);
            case 116 /* MessageCodes.ParameterDescriptionMessage */:
                return this.parseParameterDescriptionMessage(offset, length, bytes);
            case 71 /* MessageCodes.CopyIn */:
                return this.parseCopyInMessage(offset, length, bytes);
            case 72 /* MessageCodes.CopyOut */:
                return this.parseCopyOutMessage(offset, length, bytes);
            case 100 /* MessageCodes.CopyData */:
                return this.parseCopyData(offset, length, bytes);
            default:
                return new messages_1.DatabaseError('received invalid response: ' + code.toString(16), length, 'error');
        }
    }
parseReadyForQueryMessage(offset, length, bytes) {
this.reader.setBuffer(offset, bytes);
const status = this.reader.string(1);
return new messages_1.ReadyForQueryMessage(length, status);
}
parseCommandCompleteMessage(offset, length, bytes) {
this.reader.setBuffer(offset, bytes);
const text = this.reader.cstring();
return new messages_1.CommandCompleteMessage(length, text);
}
parseCopyData(offset, length, bytes) {
const chunk = bytes.slice(offset, offset + (length - 4));
return new messages_1.CopyDataMessage(length, chunk);
}
    // CopyInResponse: same wire shape as CopyOutResponse; only the name differs.
    parseCopyInMessage(offset, length, bytes) {
        return this.parseCopyMessage(offset, length, bytes, 'copyInResponse');
    }
    // CopyOutResponse: same wire shape as CopyInResponse; only the name differs.
    parseCopyOutMessage(offset, length, bytes) {
        return this.parseCopyMessage(offset, length, bytes, 'copyOutResponse');
    }
parseCopyMessage(offset, length, bytes, messageName) {
this.reader.setBuffer(offset, bytes);
const isBinary = this.reader.byte() !== 0;
const columnCount = this.reader.int16();
const message = new messages_1.CopyResponse(length, messageName, isBinary, columnCount);
for (let i = 0; i < columnCount; i++) {
message.columnTypes[i] = this.reader.int16();
}
return message;
}
parseNotificationMessage(offset, length, bytes) {
this.reader.setBuffer(offset, bytes);
const processId = this.reader.int32();
const channel = this.reader.cstring();
const payload = this.reader.cstring();
return new messages_1.NotificationResponseMessage(length, processId, channel, payload);
}
parseRowDescriptionMessage(offset, length, bytes) {
this.reader.setBuffer(offset, bytes);
const fieldCount = this.reader.int16();
const message = new messages_1.RowDescriptionMessage(length, fieldCount);
for (let i = 0; i < fieldCount; i++) {
message.fields[i] = this.parseField();
}
return message;
}
parseField() {
const name = this.reader.cstring();
const tableID = this.reader.uint32();
const columnID = this.reader.int16();
const dataTypeID = this.reader.uint32();
const dataTypeSize = this.reader.int16();
const dataTypeModifier = this.reader.int32();
const mode = this.reader.int16() === 0 ? 'text' : 'binary';
return new messages_1.Field(name, tableID, columnID, dataTypeID, dataTypeSize, dataTypeModifier, mode);
}
parseParameterDescriptionMessage(offset, length, bytes) {
this.reader.setBuffer(offset, bytes);
const parameterCount = this.reader.int16();
const message = new messages_1.ParameterDescriptionMessage(length, parameterCount);
for (let i = 0; i < parameterCount; i++) {
message.dataTypeIDs[i] = this.reader.int32();
}
return message;
}
parseDataRowMessage(offset, length, bytes) {
this.reader.setBuffer(offset, bytes);
const fieldCount = this.reader.int16();
const fields = new Array(fieldCount);
for (let i = 0; i < fieldCount; i++) {
const len = this.reader.int32();
// a -1 for length means the value of the field is null
fields[i] = len === -1 ? null : this.reader.string(len);
}
return new messages_1.DataRowMessage(length, fields);
}
parseParameterStatusMessage(offset, length, bytes) {
this.reader.setBuffer(offset, bytes);
const name = this.reader.cstring();
const value = this.reader.cstring();
return new messages_1.ParameterStatusMessage(length, name, value);
}
parseBackendKeyData(offset, length, bytes) {
this.reader.setBuffer(offset, bytes);
const processID = this.reader.int32();
const secretKey = this.reader.int32();
return new messages_1.BackendKeyDataMessage(length, processID, secretKey);
}
    /**
     * Authentication ('R') messages share one code; the first int32 of the
     * payload selects the concrete variant. Most variants mutate and return
     * the plain `message` object; MD5 returns a dedicated class instance.
     */
    parseAuthenticationResponse(offset, length, bytes) {
        this.reader.setBuffer(offset, bytes);
        const code = this.reader.int32();
        // TODO(bmc): maybe better types here
        const message = {
            name: 'authenticationOk',
            length,
        };
        switch (code) {
            case 0: // AuthenticationOk
                break;
            case 3: // AuthenticationCleartextPassword
                // Only renamed when the length matches the fixed-size variant.
                if (message.length === 8) {
                    message.name = 'authenticationCleartextPassword';
                }
                break;
            case 5: // AuthenticationMD5Password
                if (message.length === 12) {
                    message.name = 'authenticationMD5Password';
                    const salt = this.reader.bytes(4);
                    // Early return: MD5 gets its own message class.
                    return new messages_1.AuthenticationMD5Password(length, salt);
                }
                break;
            case 10: // AuthenticationSASL
                {
                    message.name = 'authenticationSASL';
                    message.mechanisms = [];
                    let mechanism;
                    // Mechanism list is terminated by an empty cstring.
                    do {
                        mechanism = this.reader.cstring();
                        if (mechanism) {
                            message.mechanisms.push(mechanism);
                        }
                    } while (mechanism);
                }
                break;
            case 11: // AuthenticationSASLContinue
                // length - 8 = payload minus the length field and variant code.
                message.name = 'authenticationSASLContinue';
                message.data = this.reader.string(length - 8);
                break;
            case 12: // AuthenticationSASLFinal
                message.name = 'authenticationSASLFinal';
                message.data = this.reader.string(length - 8);
                break;
            default:
                throw new Error('Unknown authenticationOk message type ' + code);
        }
        return message;
    }
parseErrorMessage(offset, length, bytes, name) {
this.reader.setBuffer(offset, bytes);
const fields = {};
let fieldType = this.reader.string(1);
while (fieldType !== '\0') {
fields[fieldType] = this.reader.cstring();
fieldType = this.reader.string(1);
}
const messageValue = fields.M;
const message = name === 'notice' ? new messages_1.NoticeMessage(length, messageValue) : new messages_1.DatabaseError(messageValue, length, name);
message.severity = fields.S;
message.code = fields.C;
message.detail = fields.D;
message.hint = fields.H;
message.position = fields.P;
message.internalPosition = fields.p;
message.internalQuery = fields.q;
message.where = fields.W;
message.schema = fields.s;
message.table = fields.t;
message.column = fields.c;
message.dataType = fields.d;
message.constraint = fields.n;
message.file = fields.F;
message.line = fields.L;
message.routine = fields.R;
return message;
}
}
exports.Parser = Parser;
//# sourceMappingURL=parser.js.map

1
node_modules/pg-protocol/dist/parser.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

42
node_modules/pg-protocol/dist/serializer.d.ts generated vendored Normal file
View File

@ -0,0 +1,42 @@
/** Options for building a Parse message (prepared statement). */
declare type ParseOpts = {
    name?: string;
    types?: number[];
    text: string;
};
/** Maps a value before serialization; receives the value and its index. */
declare type ValueMapper = (param: any, index: number) => any;
/** Options for building a Bind message. */
declare type BindOpts = {
    portal?: string;
    binary?: boolean;
    statement?: string;
    values?: any[];
    valueMapper?: ValueMapper;
};
/** Options for building an Execute message. */
declare type ExecOpts = {
    portal?: string;
    rows?: number;
};
/** Target of a Describe/Close message: 'S' statement or 'P' portal. */
declare type PortalOpts = {
    type: 'S' | 'P';
    name?: string;
};
/** Serializers for every frontend message; each returns the wire bytes. */
declare const serialize: {
    startup: (opts: Record<string, string>) => Buffer;
    password: (password: string) => Buffer;
    requestSsl: () => Buffer;
    sendSASLInitialResponseMessage: (mechanism: string, initialResponse: string) => Buffer;
    sendSCRAMClientFinalMessage: (additionalData: string) => Buffer;
    query: (text: string) => Buffer;
    parse: (query: ParseOpts) => Buffer;
    bind: (config?: BindOpts) => Buffer;
    execute: (config?: ExecOpts) => Buffer;
    describe: (msg: PortalOpts) => Buffer;
    close: (msg: PortalOpts) => Buffer;
    flush: () => Buffer;
    sync: () => Buffer;
    end: () => Buffer;
    copyData: (chunk: Buffer) => Buffer;
    copyDone: () => Buffer;
    copyFail: (message: string) => Buffer;
    cancel: (processID: number, secretKey: number) => Buffer;
};
export { serialize };

189
node_modules/pg-protocol/dist/serializer.js generated vendored Normal file
View File

@ -0,0 +1,189 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.serialize = void 0;
const buffer_writer_1 = require("./buffer-writer");
const writer = new buffer_writer_1.Writer();
// StartupMessage: protocol version 3.0, then name/value cstring pairs, then a
// terminating empty cstring. This message carries no code byte, only a length
// prefix, so the body is framed by a second Writer.
const startup = (opts) => {
    writer.addInt16(3).addInt16(0);
    for (const [key, value] of Object.entries(opts)) {
        writer.addCString(key).addCString(value);
    }
    writer.addCString('client_encoding').addCString('UTF8');
    const bodyBuffer = writer.addCString('').flush();
    // this message is sent without a code
    const length = bodyBuffer.length + 4;
    return new buffer_writer_1.Writer().addInt32(length).add(bodyBuffer).flush();
};
// SSLRequest packet: exactly 8 bytes — its own length (8) followed by the
// special request code 80877103. No message-type byte.
const requestSsl = () => {
    const packet = Buffer.allocUnsafe(8);
    packet.writeInt32BE(8, 0);
    packet.writeInt32BE(80877103, 4);
    return packet;
};
// PasswordMessage: the password as a single cstring body, flushed with
// code 0x70 ('p') — the same code SASL responses use.
const password = (password) => {
    return writer.addCString(password).flush(112 /* code.startup */);
};
// SASLInitialResponse: mechanism name (cstring), the byte length of the
// initial client response, then the response bytes. Shares code 0x70 ('p')
// with password messages.
const sendSASLInitialResponseMessage = function (mechanism, initialResponse) {
    const responseByteLength = Buffer.byteLength(initialResponse);
    writer.addCString(mechanism);
    writer.addInt32(responseByteLength);
    writer.addString(initialResponse);
    return writer.flush(112 /* code.startup */);
};
// SASLResponse (SCRAM client-final): raw string body, code 0x70 ('p').
const sendSCRAMClientFinalMessage = function (additionalData) {
    return writer.addString(additionalData).flush(112 /* code.startup */);
};
// Query ('Q' = 81): simple-protocol query — the SQL text as one cstring.
const query = (text) => {
    return writer.addCString(text).flush(81 /* code.query */);
};
const emptyArray = [];
// Parse ('P' = 80): prepared-statement name, query text, then the count and
// list of explicit parameter type OIDs (may be empty).
// expect something like this:
// { name: 'queryName',
//   text: 'select * from blah',
//   types: ['int8', 'bool'] }
const parse = (query) => {
    // normalize missing query names to allow for null
    const name = query.name || '';
    if (name.length > 63) {
        console.error('Warning! Postgres only supports 63 characters for query names.');
        console.error('You supplied %s (%s)', name, name.length);
        console.error('This can cause conflicts and silent errors executing queries');
    }
    const types = query.types || emptyArray;
    writer
        .addCString(name) // name of query
        .addCString(query.text) // actual query text
        .addInt16(types.length);
    for (const typeOid of types) {
        writer.addInt32(typeOid);
    }
    return writer.flush(80 /* code.parse */);
};
const paramWriter = new buffer_writer_1.Writer();
// Writes the per-parameter parts of a Bind message across TWO writers:
// the format code (text/binary) for each value goes into the shared `writer`
// (directly after bind() has written the format-code count), while the value
// lengths and bytes accumulate in `paramWriter`, which bind() appends later.
const writeValues = function (values, valueMapper) {
    for (let i = 0; i < values.length; i++) {
        const mappedVal = valueMapper ? valueMapper(values[i], i) : values[i];
        if (mappedVal == null) {
            // add the param type (string) to the writer
            writer.addInt16(0 /* ParamType.STRING */);
            // write -1 to the param writer to indicate null
            paramWriter.addInt32(-1);
        }
        else if (mappedVal instanceof Buffer) {
            // add the param type (binary) to the writer
            writer.addInt16(1 /* ParamType.BINARY */);
            // add the buffer to the param writer
            paramWriter.addInt32(mappedVal.length);
            paramWriter.add(mappedVal);
        }
        else {
            // add the param type (string) to the writer
            writer.addInt16(0 /* ParamType.STRING */);
            // length prefix is the UTF-8 byte length, not the JS string length
            paramWriter.addInt32(Buffer.byteLength(mappedVal));
            paramWriter.addString(mappedVal);
        }
    }
};
// Bind ('B' = 66): binds values to a prepared statement within a portal.
// The statement order is significant — writeValues() interleaves its output
// with the counts written here (see writeValues above).
const bind = (config = {}) => {
    // normalize config
    const portal = config.portal || '';
    const statement = config.statement || '';
    const binary = config.binary || false;
    const values = config.values || emptyArray;
    const len = values.length;
    writer.addCString(portal).addCString(statement);
    // count of parameter format codes; writeValues appends one code per value
    writer.addInt16(len);
    writeValues(values, config.valueMapper);
    // count of parameter values, then the accumulated length/byte pairs
    writer.addInt16(len);
    writer.add(paramWriter.flush());
    // all results use the same format code
    writer.addInt16(1);
    // format code
    writer.addInt16(binary ? 1 /* ParamType.BINARY */ : 0 /* ParamType.STRING */);
    return writer.flush(66 /* code.bind */);
};
// Precomputed Execute ('E' = 69) message for the common case: anonymous
// portal, row limit 0. Length field 9 = 4 (self) + 1 (empty cstring) + 4 (rows).
const emptyExecute = Buffer.from([69 /* code.execute */, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x00, 0x00]);
// Execute: portal name (cstring) followed by the maximum row count (0 = all).
const execute = (config) => {
    // this is the happy path for most queries
    if (!config || (!config.portal && !config.rows)) {
        return emptyExecute;
    }
    const portal = config.portal || '';
    const rows = config.rows || 0;
    const portalByteLength = Buffer.byteLength(portal);
    // length field: 4 (self) + portal bytes + NUL terminator + 4 (row count)
    const messageLength = 4 + portalByteLength + 1 + 4;
    // one extra byte for the message code itself
    const packet = Buffer.allocUnsafe(1 + messageLength);
    packet[0] = 69 /* code.execute */;
    packet.writeInt32BE(messageLength, 1);
    packet.write(portal, 5, 'utf-8');
    packet[portalByteLength + 5] = 0; // null terminate portal cString
    packet.writeUInt32BE(rows, packet.length - 4);
    return packet;
};
// CancelRequest packet: fixed 16 bytes with no message code — its own length,
// the magic pair 1234/5678 that marks it as a cancel request, then the target
// backend's pid and secret key.
const cancel = (processID, secretKey) => {
    const packet = Buffer.allocUnsafe(16);
    packet.writeInt32BE(16, 0);
    packet.writeInt16BE(1234, 4);
    packet.writeInt16BE(5678, 6);
    packet.writeInt32BE(processID, 8);
    packet.writeInt32BE(secretKey, 12);
    return packet;
};
// Builds a message whose entire body is one null-terminated string:
// [code byte][int32 length][utf-8 string bytes][NUL]. The length field counts
// itself, the string and the terminator — not the code byte.
const cstringMessage = (code, string) => {
    const bodyLength = Buffer.byteLength(string) + 5; // 4 (self) + string + NUL
    const packet = Buffer.allocUnsafe(bodyLength + 1); // one extra byte for the code
    packet[0] = code;
    packet.writeInt32BE(bodyLength, 1);
    packet.write(string, 5, 'utf-8');
    packet[bodyLength] = 0; // null terminator is the final byte of the packet
    return packet;
};
// Describe ('D' = 68) for the anonymous portal/statement is built once and
// reused; named targets are serialized on demand.
const emptyDescribePortal = writer.addCString('P').flush(68 /* code.describe */);
const emptyDescribeStatement = writer.addCString('S').flush(68 /* code.describe */);
// Describe: a type byte ('P' portal / 'S' statement) followed by the name.
const describe = (msg) => {
    if (msg.name) {
        return cstringMessage(68 /* code.describe */, `${msg.type}${msg.name || ''}`);
    }
    return msg.type === 'P' ? emptyDescribePortal : emptyDescribeStatement;
};
// Close ('C' = 67): type byte ('P' portal / 'S' statement) plus target name.
const close = (msg) => {
    const text = `${msg.type}${msg.name || ''}`;
    return cstringMessage(67 /* code.close */, text);
};
// CopyData ('d' = 100): the chunk bytes are the entire message body.
const copyData = (chunk) => {
    return writer.add(chunk).flush(100 /* code.copyFromChunk */);
};
// CopyFail ('f' = 102): aborts a COPY with a human-readable error message.
const copyFail = (message) => {
    return cstringMessage(102 /* code.copyFail */, message);
};
const codeOnlyBuffer = (code) => Buffer.from([code, 0x00, 0x00, 0x00, 0x04]);
const flushBuffer = codeOnlyBuffer(72 /* code.flush */);
const syncBuffer = codeOnlyBuffer(83 /* code.sync */);
const endBuffer = codeOnlyBuffer(88 /* code.end */);
const copyDoneBuffer = codeOnlyBuffer(99 /* code.copyDone */);
const serialize = {
startup,
password,
requestSsl,
sendSASLInitialResponseMessage,
sendSCRAMClientFinalMessage,
query,
parse,
bind,
execute,
describe,
close,
flush: () => flushBuffer,
sync: () => syncBuffer,
end: () => endBuffer,
copyData,
copyDone: () => copyDoneBuffer,
copyFail,
cancel,
};
exports.serialize = serialize;
//# sourceMappingURL=serializer.js.map

1
node_modules/pg-protocol/dist/serializer.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

11
node_modules/pg-protocol/esm/index.js generated vendored Normal file
View File

@ -0,0 +1,11 @@
// ESM wrapper for pg-protocol
// Re-exports the CommonJS build so `import { ... } from 'pg-protocol'`
// resolves named bindings under native ESM (see the "exports" map in
// package.json, which routes "import" here).
import * as protocol from '../dist/index.js'
// Re-export all the properties
export const DatabaseError = protocol.DatabaseError
export const SASL = protocol.SASL
export const serialize = protocol.serialize
export const parse = protocol.parse
// Re-export the default
export default protocol

45
node_modules/pg-protocol/package.json generated vendored Normal file
View File

@ -0,0 +1,45 @@
{
"name": "pg-protocol",
"version": "1.10.3",
"description": "The postgres client/server binary protocol, implemented in TypeScript",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"exports": {
".": {
"import": "./esm/index.js",
"require": "./dist/index.js",
"default": "./dist/index.js"
},
"./dist/*": "./dist/*.js",
"./dist/*.js": "./dist/*.js"
},
"license": "MIT",
"devDependencies": {
"@types/chai": "^4.2.7",
"@types/mocha": "^10.0.7",
"@types/node": "^12.12.21",
"chai": "^4.2.0",
"chunky": "^0.0.0",
"mocha": "^10.5.2",
"ts-node": "^8.5.4",
"typescript": "^4.0.3"
},
"scripts": {
"test": "mocha dist/**/*.test.js",
"build": "tsc",
"build:watch": "tsc --watch",
"prepublish": "yarn build",
"pretest": "yarn build"
},
"repository": {
"type": "git",
"url": "git://github.com/brianc/node-postgres.git",
"directory": "packages/pg-protocol"
},
"files": [
"/dist/*{js,ts,map}",
"/src",
"/esm"
],
"gitHead": "8f8e7315e8f7c1bb01e98fdb41c8c92585510782"
}

25
node_modules/pg-protocol/src/b.ts generated vendored Normal file
View File

@ -0,0 +1,25 @@
// file for microbenchmarking
// Measures BufferReader.cstring() throughput: runs batches of LOOPS parses,
// yielding to the event loop between batches via setImmediate, and prints the
// total elapsed milliseconds when done.
import { BufferReader } from './buffer-reader'
const LOOPS = 1000
let count = 0
const start = performance.now()
const reader = new BufferReader()
// Seven non-zero bytes plus a NUL terminator — one complete cstring.
const buffer = Buffer.from([33, 33, 33, 33, 33, 33, 33, 0])
const run = () => {
  if (count > LOOPS) {
    // Elapsed time includes setImmediate scheduling overhead.
    console.log(performance.now() - start)
    return
  }
  count++
  for (let i = 0; i < LOOPS; i++) {
    reader.setBuffer(0, buffer)
    reader.cstring()
  }
  setImmediate(run)
}
run()

60
node_modules/pg-protocol/src/buffer-reader.ts generated vendored Normal file
View File

@ -0,0 +1,60 @@
const emptyBuffer = Buffer.allocUnsafe(0)
/**
 * Cursor-style reader over a Buffer: every read consumes bytes by advancing
 * an internal offset. Multi-byte integers are big-endian (network order), as
 * used by the postgres wire protocol.
 */
export class BufferReader {
  private buffer: Buffer = emptyBuffer
  // TODO(bmc): support non-utf8 encoding?
  private encoding: string = 'utf-8'
  constructor(private offset: number = 0) {}
  /** Point the reader at `buffer`, starting at `offset`. */
  public setBuffer(offset: number, buffer: Buffer): void {
    this.offset = offset
    this.buffer = buffer
  }
  /** Read a signed big-endian 16-bit integer. */
  public int16(): number {
    const result = this.buffer.readInt16BE(this.offset)
    this.offset += 2
    return result
  }
  /** Read a single unsigned byte. */
  public byte(): number {
    const result = this.buffer[this.offset]
    this.offset++
    return result
  }
  /** Read a signed big-endian 32-bit integer. */
  public int32(): number {
    const result = this.buffer.readInt32BE(this.offset)
    this.offset += 4
    return result
  }
  /** Read an unsigned big-endian 32-bit integer (needed for large OIDs). */
  public uint32(): number {
    const result = this.buffer.readUInt32BE(this.offset)
    this.offset += 4
    return result
  }
  /** Decode the next `length` bytes as a string. */
  public string(length: number): string {
    const result = this.buffer.toString(this.encoding, this.offset, this.offset + length)
    this.offset += length
    return result
  }
  /**
   * Read a null-terminated string. The cursor advances past the terminator,
   * which is excluded from the returned value.
   */
  public cstring(): string {
    const start = this.offset
    let end = start
    // eslint-disable-next-line no-empty
    while (this.buffer[end++] !== 0) {}
    this.offset = end
    return this.buffer.toString(this.encoding, start, end - 1)
  }
  /**
   * Return the next `length` bytes. Note: Buffer#slice returns a view that
   * shares memory with the underlying buffer, not a copy.
   */
  public bytes(length: number): Buffer {
    const result = this.buffer.slice(this.offset, this.offset + length)
    this.offset += length
    return result
  }
}

85
node_modules/pg-protocol/src/buffer-writer.ts generated vendored Normal file
View File

@ -0,0 +1,85 @@
//binary data writer tuned for encoding binary specific to the postgres binary protocol
export class Writer {
  private buffer: Buffer
  // Starts at 5 to reserve room for the 1-byte message code + 4-byte length
  // header, which join() fills in at flush time.
  private offset: number = 5
  private headerPosition: number = 0
  constructor(private size = 256) {
    this.buffer = Buffer.allocUnsafe(size)
  }
  /** Grow the backing buffer if fewer than `size` bytes remain. */
  private ensure(size: number): void {
    const remaining = this.buffer.length - this.offset
    if (remaining < size) {
      const oldBuffer = this.buffer
      // exponential growth factor of around ~ 1.5
      // https://stackoverflow.com/questions/2269063/buffer-growth-strategy
      const newSize = oldBuffer.length + (oldBuffer.length >> 1) + size
      this.buffer = Buffer.allocUnsafe(newSize)
      oldBuffer.copy(this.buffer)
    }
  }
  /** Append a big-endian 32-bit integer. */
  public addInt32(num: number): Writer {
    this.ensure(4)
    this.buffer[this.offset++] = (num >>> 24) & 0xff
    this.buffer[this.offset++] = (num >>> 16) & 0xff
    this.buffer[this.offset++] = (num >>> 8) & 0xff
    this.buffer[this.offset++] = (num >>> 0) & 0xff
    return this
  }
  /** Append a big-endian 16-bit integer. */
  public addInt16(num: number): Writer {
    this.ensure(2)
    this.buffer[this.offset++] = (num >>> 8) & 0xff
    this.buffer[this.offset++] = (num >>> 0) & 0xff
    return this
  }
  /** Append a UTF-8 string followed by a NUL terminator. */
  public addCString(string: string): Writer {
    if (!string) {
      this.ensure(1)
    } else {
      const len = Buffer.byteLength(string)
      this.ensure(len + 1) // +1 for null terminator
      this.buffer.write(string, this.offset, 'utf-8')
      this.offset += len
    }
    this.buffer[this.offset++] = 0 // null terminator
    return this
  }
  /** Append a string without a terminator. */
  public addString(string: string = ''): Writer {
    const len = Buffer.byteLength(string)
    this.ensure(len)
    this.buffer.write(string, this.offset)
    this.offset += len
    return this
  }
  /** Append raw bytes. */
  public add(otherBuffer: Buffer): Writer {
    this.ensure(otherBuffer.length)
    otherBuffer.copy(this.buffer, this.offset)
    this.offset += otherBuffer.length
    return this
  }
  // Finalize the packet. With a code, fills in the reserved 5-byte header;
  // without one, the reserved header bytes are sliced off entirely.
  // NOTE(review): a code of 0 is treated the same as "no code" — callers only
  // pass non-zero codes.
  private join(code?: number): Buffer {
    if (code) {
      this.buffer[this.headerPosition] = code
      //length is everything in this packet minus the code
      const length = this.offset - (this.headerPosition + 1)
      this.buffer.writeInt32BE(length, this.headerPosition + 1)
    }
    return this.buffer.slice(code ? 0 : 5, this.offset)
  }
  /**
   * Return the finished packet and reset the writer. A fresh backing buffer
   * is allocated so the returned slice (a view) is never overwritten by
   * subsequent writes.
   */
  public flush(code?: number): Buffer {
    const result = this.join(code)
    this.offset = 5
    this.headerPosition = 0
    this.buffer = Buffer.allocUnsafe(this.size)
    return result
  }
}

568
node_modules/pg-protocol/src/inbound-parser.test.ts generated vendored Normal file
View File

@ -0,0 +1,568 @@
import buffers from './testing/test-buffers'
import BufferList from './testing/buffer-list'
import { parse } from '.'
import assert from 'assert'
import { PassThrough } from 'stream'
import { BackendMessage } from './messages'
const authOkBuffer = buffers.authenticationOk()
const paramStatusBuffer = buffers.parameterStatus('client_encoding', 'UTF8')
const readyForQueryBuffer = buffers.readyForQuery()
const backendKeyDataBuffer = buffers.backendKeyData(1, 2)
const commandCompleteBuffer = buffers.commandComplete('SELECT 3')
const parseCompleteBuffer = buffers.parseComplete()
const bindCompleteBuffer = buffers.bindComplete()
const portalSuspendedBuffer = buffers.portalSuspended()
const row1 = {
name: 'id',
tableID: 1,
attributeNumber: 2,
dataTypeID: 3,
dataTypeSize: 4,
typeModifier: 5,
formatCode: 0,
}
const oneRowDescBuff = buffers.rowDescription([row1])
row1.name = 'bang'
const twoRowBuf = buffers.rowDescription([
row1,
{
name: 'whoah',
tableID: 10,
attributeNumber: 11,
dataTypeID: 12,
dataTypeSize: 13,
typeModifier: 14,
formatCode: 0,
},
])
const rowWithBigOids = {
name: 'bigoid',
tableID: 3000000001,
attributeNumber: 2,
dataTypeID: 3000000003,
dataTypeSize: 4,
typeModifier: 5,
formatCode: 0,
}
const bigOidDescBuff = buffers.rowDescription([rowWithBigOids])
const emptyRowFieldBuf = buffers.dataRow([])
const oneFieldBuf = buffers.dataRow(['test'])
const expectedAuthenticationOkayMessage = {
name: 'authenticationOk',
length: 8,
}
const expectedParameterStatusMessage = {
name: 'parameterStatus',
parameterName: 'client_encoding',
parameterValue: 'UTF8',
length: 25,
}
const expectedBackendKeyDataMessage = {
name: 'backendKeyData',
processID: 1,
secretKey: 2,
}
const expectedReadyForQueryMessage = {
name: 'readyForQuery',
length: 5,
status: 'I',
}
const expectedCommandCompleteMessage = {
name: 'commandComplete',
length: 13,
text: 'SELECT 3',
}
const emptyRowDescriptionBuffer = new BufferList()
.addInt16(0) // number of fields
.join(true, 'T')
const expectedEmptyRowDescriptionMessage = {
name: 'rowDescription',
length: 6,
fieldCount: 0,
fields: [],
}
const expectedOneRowMessage = {
name: 'rowDescription',
length: 27,
fieldCount: 1,
fields: [
{
name: 'id',
tableID: 1,
columnID: 2,
dataTypeID: 3,
dataTypeSize: 4,
dataTypeModifier: 5,
format: 'text',
},
],
}
const expectedTwoRowMessage = {
name: 'rowDescription',
length: 53,
fieldCount: 2,
fields: [
{
name: 'bang',
tableID: 1,
columnID: 2,
dataTypeID: 3,
dataTypeSize: 4,
dataTypeModifier: 5,
format: 'text',
},
{
name: 'whoah',
tableID: 10,
columnID: 11,
dataTypeID: 12,
dataTypeSize: 13,
dataTypeModifier: 14,
format: 'text',
},
],
}
const expectedBigOidMessage = {
name: 'rowDescription',
length: 31,
fieldCount: 1,
fields: [
{
name: 'bigoid',
tableID: 3000000001,
columnID: 2,
dataTypeID: 3000000003,
dataTypeSize: 4,
dataTypeModifier: 5,
format: 'text',
},
],
}
const emptyParameterDescriptionBuffer = new BufferList()
.addInt16(0) // number of parameters
.join(true, 't')
const oneParameterDescBuf = buffers.parameterDescription([1111])
const twoParameterDescBuf = buffers.parameterDescription([2222, 3333])
const expectedEmptyParameterDescriptionMessage = {
name: 'parameterDescription',
length: 6,
parameterCount: 0,
dataTypeIDs: [],
}
const expectedOneParameterMessage = {
name: 'parameterDescription',
length: 10,
parameterCount: 1,
dataTypeIDs: [1111],
}
const expectedTwoParameterMessage = {
name: 'parameterDescription',
length: 14,
parameterCount: 2,
dataTypeIDs: [2222, 3333],
}
// Registers a mocha test that runs `buffer` through the streaming parser and
// checks every key of `expectedMessage` against the FIRST parsed message.
// NOTE(review): the binding is named `lastMessage` but destructuring takes
// element 0 — the name is misleading.
const testForMessage = function (buffer: Buffer, expectedMessage: any) {
  it('receives and parses ' + expectedMessage.name, async () => {
    const messages = await parseBuffers([buffer])
    const [lastMessage] = messages
    // Only the keys present in expectedMessage are compared; any extra
    // properties on the parsed message are ignored.
    for (const key in expectedMessage) {
      assert.deepEqual((lastMessage as any)[key], expectedMessage[key])
    }
  })
}
const plainPasswordBuffer = buffers.authenticationCleartextPassword()
const md5PasswordBuffer = buffers.authenticationMD5Password()
const SASLBuffer = buffers.authenticationSASL()
const SASLContinueBuffer = buffers.authenticationSASLContinue()
const SASLFinalBuffer = buffers.authenticationSASLFinal()
const expectedPlainPasswordMessage = {
name: 'authenticationCleartextPassword',
}
const expectedMD5PasswordMessage = {
name: 'authenticationMD5Password',
salt: Buffer.from([1, 2, 3, 4]),
}
const expectedSASLMessage = {
name: 'authenticationSASL',
mechanisms: ['SCRAM-SHA-256'],
}
const expectedSASLContinueMessage = {
name: 'authenticationSASLContinue',
data: 'data',
}
const expectedSASLFinalMessage = {
name: 'authenticationSASLFinal',
data: 'data',
}
const notificationResponseBuffer = buffers.notification(4, 'hi', 'boom')
const expectedNotificationResponseMessage = {
name: 'notification',
processId: 4,
channel: 'hi',
payload: 'boom',
}
// Writes each buffer to a PassThrough stream, ends it, and runs the streaming
// parser over it, collecting every parsed message in arrival order.
const parseBuffers = async (buffers: Buffer[]): Promise<BackendMessage[]> => {
  const stream = new PassThrough()
  for (const buffer of buffers) {
    stream.write(buffer)
  }
  stream.end()
  const msgs: BackendMessage[] = []
  await parse(stream, (msg) => msgs.push(msg))
  return msgs
}
describe('PgPacketStream', function () {
testForMessage(authOkBuffer, expectedAuthenticationOkayMessage)
testForMessage(plainPasswordBuffer, expectedPlainPasswordMessage)
testForMessage(md5PasswordBuffer, expectedMD5PasswordMessage)
testForMessage(SASLBuffer, expectedSASLMessage)
testForMessage(SASLContinueBuffer, expectedSASLContinueMessage)
// this exercises a found bug in the parser:
// https://github.com/brianc/node-postgres/pull/2210#issuecomment-627626084
// and adds a test which is deterministic, rather than relying on network packet chunking
const extendedSASLContinueBuffer = Buffer.concat([SASLContinueBuffer, Buffer.from([1, 2, 3, 4])])
testForMessage(extendedSASLContinueBuffer, expectedSASLContinueMessage)
testForMessage(SASLFinalBuffer, expectedSASLFinalMessage)
// this exercises a found bug in the parser:
// https://github.com/brianc/node-postgres/pull/2210#issuecomment-627626084
// and adds a test which is deterministic, rather than relying on network packet chunking
const extendedSASLFinalBuffer = Buffer.concat([SASLFinalBuffer, Buffer.from([1, 2, 4, 5])])
testForMessage(extendedSASLFinalBuffer, expectedSASLFinalMessage)
testForMessage(paramStatusBuffer, expectedParameterStatusMessage)
testForMessage(backendKeyDataBuffer, expectedBackendKeyDataMessage)
testForMessage(readyForQueryBuffer, expectedReadyForQueryMessage)
testForMessage(commandCompleteBuffer, expectedCommandCompleteMessage)
testForMessage(notificationResponseBuffer, expectedNotificationResponseMessage)
testForMessage(buffers.emptyQuery(), {
name: 'emptyQuery',
length: 4,
})
testForMessage(Buffer.from([0x6e, 0, 0, 0, 4]), {
name: 'noData',
})
describe('rowDescription messages', function () {
testForMessage(emptyRowDescriptionBuffer, expectedEmptyRowDescriptionMessage)
testForMessage(oneRowDescBuff, expectedOneRowMessage)
testForMessage(twoRowBuf, expectedTwoRowMessage)
testForMessage(bigOidDescBuff, expectedBigOidMessage)
})
describe('parameterDescription messages', function () {
testForMessage(emptyParameterDescriptionBuffer, expectedEmptyParameterDescriptionMessage)
testForMessage(oneParameterDescBuf, expectedOneParameterMessage)
testForMessage(twoParameterDescBuf, expectedTwoParameterMessage)
})
describe('parsing rows', function () {
describe('parsing empty row', function () {
testForMessage(emptyRowFieldBuf, {
name: 'dataRow',
fieldCount: 0,
})
})
describe('parsing data row with fields', function () {
testForMessage(oneFieldBuf, {
name: 'dataRow',
fieldCount: 1,
fields: ['test'],
})
})
})
describe('notice message', function () {
// this uses the same logic as error message
const buff = buffers.notice([{ type: 'C', value: 'code' }])
testForMessage(buff, {
name: 'notice',
code: 'code',
})
})
testForMessage(buffers.error([]), {
name: 'error',
})
describe('with all the fields', function () {
const buffer = buffers.error([
{
type: 'S',
value: 'ERROR',
},
{
type: 'C',
value: 'code',
},
{
type: 'M',
value: 'message',
},
{
type: 'D',
value: 'details',
},
{
type: 'H',
value: 'hint',
},
{
type: 'P',
value: '100',
},
{
type: 'p',
value: '101',
},
{
type: 'q',
value: 'query',
},
{
type: 'W',
value: 'where',
},
{
type: 'F',
value: 'file',
},
{
type: 'L',
value: 'line',
},
{
type: 'R',
value: 'routine',
},
{
type: 'Z', // ignored
value: 'alsdkf',
},
])
testForMessage(buffer, {
name: 'error',
severity: 'ERROR',
code: 'code',
message: 'message',
detail: 'details',
hint: 'hint',
position: '100',
internalPosition: '101',
internalQuery: 'query',
where: 'where',
file: 'file',
line: 'line',
routine: 'routine',
})
})
testForMessage(parseCompleteBuffer, {
name: 'parseComplete',
})
testForMessage(bindCompleteBuffer, {
name: 'bindComplete',
})
testForMessage(bindCompleteBuffer, {
name: 'bindComplete',
})
testForMessage(buffers.closeComplete(), {
name: 'closeComplete',
})
describe('parses portal suspended message', function () {
testForMessage(portalSuspendedBuffer, {
name: 'portalSuspended',
})
})
describe('parses replication start message', function () {
testForMessage(Buffer.from([0x57, 0x00, 0x00, 0x00, 0x04]), {
name: 'replicationStart',
length: 4,
})
})
describe('copy', () => {
testForMessage(buffers.copyIn(0), {
name: 'copyInResponse',
length: 7,
binary: false,
columnTypes: [],
})
testForMessage(buffers.copyIn(2), {
name: 'copyInResponse',
length: 11,
binary: false,
columnTypes: [0, 1],
})
testForMessage(buffers.copyOut(0), {
name: 'copyOutResponse',
length: 7,
binary: false,
columnTypes: [],
})
testForMessage(buffers.copyOut(3), {
name: 'copyOutResponse',
length: 13,
binary: false,
columnTypes: [0, 1, 2],
})
testForMessage(buffers.copyDone(), {
name: 'copyDone',
length: 4,
})
testForMessage(buffers.copyData(Buffer.from([5, 6, 7])), {
name: 'copyData',
length: 7,
chunk: Buffer.from([5, 6, 7]),
})
})
  // since the data message on a stream can randomly divide the incoming
// tcp packets anywhere, we need to make sure we can parse every single
// split on a tcp message
describe('split buffer, single message parsing', function () {
const fullBuffer = buffers.dataRow([null, 'bang', 'zug zug', null, '!'])
it('parses when full buffer comes in', async function () {
const messages = await parseBuffers([fullBuffer])
const message = messages[0] as any
assert.equal(message.fields.length, 5)
assert.equal(message.fields[0], null)
assert.equal(message.fields[1], 'bang')
assert.equal(message.fields[2], 'zug zug')
assert.equal(message.fields[3], null)
assert.equal(message.fields[4], '!')
})
const testMessageReceivedAfterSplitAt = async function (split: number) {
const firstBuffer = Buffer.alloc(fullBuffer.length - split)
const secondBuffer = Buffer.alloc(fullBuffer.length - firstBuffer.length)
fullBuffer.copy(firstBuffer, 0, 0)
fullBuffer.copy(secondBuffer, 0, firstBuffer.length)
const messages = await parseBuffers([firstBuffer, secondBuffer])
const message = messages[0] as any
assert.equal(message.fields.length, 5)
assert.equal(message.fields[0], null)
assert.equal(message.fields[1], 'bang')
assert.equal(message.fields[2], 'zug zug')
assert.equal(message.fields[3], null)
assert.equal(message.fields[4], '!')
}
it('parses when split in the middle', function () {
return testMessageReceivedAfterSplitAt(6)
})
it('parses when split at end', function () {
return testMessageReceivedAfterSplitAt(2)
})
it('parses when split at beginning', function () {
return Promise.all([
testMessageReceivedAfterSplitAt(fullBuffer.length - 2),
testMessageReceivedAfterSplitAt(fullBuffer.length - 1),
testMessageReceivedAfterSplitAt(fullBuffer.length - 5),
])
})
})
  describe('split buffer, multiple message parsing', function () {
    // Two back-to-back messages (dataRow then readyForQuery) concatenated into
    // one buffer, then split at various byte offsets.
    const dataRowBuffer = buffers.dataRow(['!'])
    const readyForQueryBuffer = buffers.readyForQuery()
    const fullBuffer = Buffer.alloc(dataRowBuffer.length + readyForQueryBuffer.length)
    dataRowBuffer.copy(fullBuffer, 0, 0)
    readyForQueryBuffer.copy(fullBuffer, dataRowBuffer.length, 0)
    const verifyMessages = function (messages: any[]) {
      assert.strictEqual(messages.length, 2)
      assert.deepEqual(messages[0], {
        name: 'dataRow',
        fieldCount: 1,
        length: 11,
        fields: ['!'],
      })
      assert.equal(messages[0].fields[0], '!')
      assert.deepEqual(messages[1], {
        name: 'readyForQuery',
        length: 5,
        status: 'I',
      })
    }
    // sanity check
    it('receives both messages when packet is not split', async function () {
      const messages = await parseBuffers([fullBuffer])
      verifyMessages(messages)
    })
    // Split `split` bytes before the end and verify both messages still arrive.
    const splitAndVerifyTwoMessages = async function (split: number) {
      const firstBuffer = Buffer.alloc(fullBuffer.length - split)
      const secondBuffer = Buffer.alloc(fullBuffer.length - firstBuffer.length)
      fullBuffer.copy(firstBuffer, 0, 0)
      fullBuffer.copy(secondBuffer, 0, firstBuffer.length)
      const messages = await parseBuffers([firstBuffer, secondBuffer])
      verifyMessages(messages)
    }
    describe('receives both messages when packet is split', function () {
      it('in the middle', function () {
        return splitAndVerifyTwoMessages(11)
      })
      it('at the front', function () {
        return Promise.all([
          splitAndVerifyTwoMessages(fullBuffer.length - 1),
          splitAndVerifyTwoMessages(fullBuffer.length - 4),
          splitAndVerifyTwoMessages(fullBuffer.length - 6),
        ])
      })
      it('at the end', function () {
        return Promise.all([splitAndVerifyTwoMessages(8), splitAndVerifyTwoMessages(1)])
      })
    })
  })
})

11
node_modules/pg-protocol/src/index.ts generated vendored Normal file
View File

@ -0,0 +1,11 @@
import { DatabaseError } from './messages'
import { serialize } from './serializer'
import { Parser, MessageCallback } from './parser'
/**
 * Pipe a readable stream of protocol bytes through a single stateful Parser,
 * invoking `callback` once per decoded backend message.
 * Resolves when the stream emits 'end'.
 */
export function parse(stream: NodeJS.ReadableStream, callback: MessageCallback): Promise<void> {
  const parser = new Parser()
  stream.on('data', (chunk: Buffer) => {
    parser.parse(chunk, callback)
  })
  return new Promise((resolve) => {
    stream.on('end', resolve)
  })
}
export { serialize, DatabaseError }

262
node_modules/pg-protocol/src/messages.ts generated vendored Normal file
View File

@ -0,0 +1,262 @@
// Transfer format for column values: 'text' or 'binary'.
export type Mode = 'text' | 'binary'
// Discriminant for every backend message object produced by the parser.
export type MessageName =
  | 'parseComplete'
  | 'bindComplete'
  | 'closeComplete'
  | 'noData'
  | 'portalSuspended'
  | 'replicationStart'
  | 'emptyQuery'
  | 'copyDone'
  | 'copyData'
  | 'rowDescription'
  | 'parameterDescription'
  | 'parameterStatus'
  | 'backendKeyData'
  | 'notification'
  | 'readyForQuery'
  | 'commandComplete'
  | 'dataRow'
  | 'copyInResponse'
  | 'copyOutResponse'
  | 'authenticationOk'
  | 'authenticationMD5Password'
  | 'authenticationCleartextPassword'
  | 'authenticationSASL'
  | 'authenticationSASLContinue'
  | 'authenticationSASLFinal'
  | 'error'
  | 'notice'
// Minimal shape shared by all backend messages: the discriminant plus the
// length reported in the message header.
export interface BackendMessage {
  name: MessageName
  length: number
}
// Shared singleton instances for body-less backend messages: these frames
// carry no payload, so the parser returns the same object every time.
// NOTE(review): the first five use length 5 while the last three use 4, even
// though all of these wire messages report an int32 length of 4 — this
// inconsistency matches long-standing upstream values; confirm before changing.
export const parseComplete: BackendMessage = {
  name: 'parseComplete',
  length: 5,
}
export const bindComplete: BackendMessage = {
  name: 'bindComplete',
  length: 5,
}
export const closeComplete: BackendMessage = {
  name: 'closeComplete',
  length: 5,
}
export const noData: BackendMessage = {
  name: 'noData',
  length: 5,
}
export const portalSuspended: BackendMessage = {
  name: 'portalSuspended',
  length: 5,
}
export const replicationStart: BackendMessage = {
  name: 'replicationStart',
  length: 4,
}
export const emptyQuery: BackendMessage = {
  name: 'emptyQuery',
  length: 4,
}
export const copyDone: BackendMessage = {
  name: 'copyDone',
  length: 4,
}
// Common field set for ErrorResponse/NoticeResponse messages. Each property
// maps to one single-letter protocol field (severity='S', code='C', …), as
// assigned by the parser's parseErrorMessage.
interface NoticeOrError {
  message: string | undefined
  severity: string | undefined
  code: string | undefined
  detail: string | undefined
  hint: string | undefined
  position: string | undefined
  internalPosition: string | undefined
  internalQuery: string | undefined
  where: string | undefined
  schema: string | undefined
  table: string | undefined
  column: string | undefined
  dataType: string | undefined
  constraint: string | undefined
  file: string | undefined
  line: string | undefined
  routine: string | undefined
}
// Error object built from a backend ErrorResponse. The individual protocol
// fields are populated after construction by the parser.
// NOTE: `name` here is the parser's MessageName discriminant ('error' or
// 'notice') and shadows the inherited Error.name.
export class DatabaseError extends Error implements NoticeOrError {
  public severity: string | undefined
  public code: string | undefined
  public detail: string | undefined
  public hint: string | undefined
  public position: string | undefined
  public internalPosition: string | undefined
  public internalQuery: string | undefined
  public where: string | undefined
  public schema: string | undefined
  public table: string | undefined
  public column: string | undefined
  public dataType: string | undefined
  public constraint: string | undefined
  public file: string | undefined
  public line: string | undefined
  public routine: string | undefined
  constructor(
    message: string,
    public readonly length: number,
    public readonly name: MessageName
  ) {
    super(message)
  }
}
/** One CopyData ('d') frame: a raw chunk of COPY payload bytes. */
export class CopyDataMessage {
  public readonly name = 'copyData'
  public readonly length: number
  public readonly chunk: Buffer
  constructor(length: number, chunk: Buffer) {
    this.length = length
    this.chunk = chunk
  }
}
/**
 * CopyInResponse/CopyOutResponse: overall binary flag plus one format-code
 * slot per column, filled in by the parser after construction.
 */
export class CopyResponse {
  public readonly columnTypes: number[]
  public readonly length: number
  public readonly name: MessageName
  public readonly binary: boolean
  constructor(length: number, name: MessageName, binary: boolean, columnCount: number) {
    this.length = length
    this.name = name
    this.binary = binary
    // Pre-sized; parseCopyMessage writes one int16 format code per column.
    this.columnTypes = new Array(columnCount)
  }
}
/** Per-column metadata from a RowDescription message. */
export class Field {
  public readonly name: string
  public readonly tableID: number
  public readonly columnID: number
  public readonly dataTypeID: number
  public readonly dataTypeSize: number
  public readonly dataTypeModifier: number
  public readonly format: Mode
  constructor(
    name: string,
    tableID: number,
    columnID: number,
    dataTypeID: number,
    dataTypeSize: number,
    dataTypeModifier: number,
    format: Mode
  ) {
    this.name = name
    this.tableID = tableID
    this.columnID = columnID
    this.dataTypeID = dataTypeID
    this.dataTypeSize = dataTypeSize
    this.dataTypeModifier = dataTypeModifier
    this.format = format
  }
}
/** RowDescription ('T'): holds one Field slot per column, filled by the parser. */
export class RowDescriptionMessage {
  public readonly name: MessageName = 'rowDescription'
  public readonly fields: Field[]
  public readonly length: number
  public readonly fieldCount: number
  constructor(length: number, fieldCount: number) {
    this.length = length
    this.fieldCount = fieldCount
    this.fields = new Array(fieldCount)
  }
}
/** ParameterDescription ('t'): one data-type OID slot per statement parameter. */
export class ParameterDescriptionMessage {
  public readonly name: MessageName = 'parameterDescription'
  public readonly dataTypeIDs: number[]
  public readonly length: number
  public readonly parameterCount: number
  constructor(length: number, parameterCount: number) {
    this.length = length
    this.parameterCount = parameterCount
    this.dataTypeIDs = new Array(parameterCount)
  }
}
/** ParameterStatus ('S'): a server run-time parameter name/value pair. */
export class ParameterStatusMessage {
  public readonly name: MessageName = 'parameterStatus'
  public readonly length: number
  public readonly parameterName: string
  public readonly parameterValue: string
  constructor(length: number, parameterName: string, parameterValue: string) {
    this.length = length
    this.parameterName = parameterName
    this.parameterValue = parameterValue
  }
}
/** AuthenticationMD5Password: carries the 4-byte salt for MD5 hashing. */
export class AuthenticationMD5Password implements BackendMessage {
  public readonly name: MessageName = 'authenticationMD5Password'
  public readonly length: number
  public readonly salt: Buffer
  constructor(length: number, salt: Buffer) {
    this.length = length
    this.salt = salt
  }
}
/** BackendKeyData ('K'): process ID and secret key used for query cancellation. */
export class BackendKeyDataMessage {
  public readonly name: MessageName = 'backendKeyData'
  public readonly length: number
  public readonly processID: number
  public readonly secretKey: number
  constructor(length: number, processID: number, secretKey: number) {
    this.length = length
    this.processID = processID
    this.secretKey = secretKey
  }
}
/** NotificationResponse ('A'): a LISTEN/NOTIFY event with channel and payload. */
export class NotificationResponseMessage {
  public readonly name: MessageName = 'notification'
  public readonly length: number
  public readonly processId: number
  public readonly channel: string
  public readonly payload: string
  constructor(length: number, processId: number, channel: string, payload: string) {
    this.length = length
    this.processId = processId
    this.channel = channel
    this.payload = payload
  }
}
/** ReadyForQuery ('Z'): carries the transaction status byte (e.g. 'I'). */
export class ReadyForQueryMessage {
  public readonly name: MessageName = 'readyForQuery'
  public readonly length: number
  public readonly status: string
  constructor(length: number, status: string) {
    this.length = length
    this.status = status
  }
}
/** CommandComplete ('C'): the command tag text, e.g. "SELECT 1". */
export class CommandCompleteMessage {
  public readonly name: MessageName = 'commandComplete'
  public readonly length: number
  public readonly text: string
  constructor(length: number, text: string) {
    this.length = length
    this.text = text
  }
}
/** DataRow ('D'): decoded column values; a null field is represented as null. */
export class DataRowMessage {
  public readonly fieldCount: number
  public readonly name: MessageName = 'dataRow'
  public length: number
  public fields: any[]
  constructor(length: number, fields: any[]) {
    this.length = length
    this.fields = fields
    // Cached so consumers need not re-read fields.length.
    this.fieldCount = fields.length
  }
}
// NoticeResponse counterpart of DatabaseError: same single-letter protocol
// fields, populated after construction by the parser, but a plain object
// rather than an Error subclass.
export class NoticeMessage implements BackendMessage, NoticeOrError {
  constructor(
    public readonly length: number,
    public readonly message: string | undefined
  ) {}
  public readonly name = 'notice'
  public severity: string | undefined
  public code: string | undefined
  public detail: string | undefined
  public hint: string | undefined
  public position: string | undefined
  public internalPosition: string | undefined
  public internalQuery: string | undefined
  public where: string | undefined
  public schema: string | undefined
  public table: string | undefined
  public column: string | undefined
  public dataType: string | undefined
  public constraint: string | undefined
  public file: string | undefined
  public line: string | undefined
  public routine: string | undefined
}

View File

@ -0,0 +1,276 @@
import assert from 'assert'
import { serialize } from './serializer'
import BufferList from './testing/buffer-list'
// Byte-for-byte checks of every frontend message builder against a BufferList
// constructed by hand from the wire format.
describe('serializer', () => {
  it('builds startup message', function () {
    const actual = serialize.startup({
      user: 'brian',
      database: 'bang',
    })
    assert.deepEqual(
      actual,
      new BufferList()
        .addInt16(3)
        .addInt16(0)
        .addCString('user')
        .addCString('brian')
        .addCString('database')
        .addCString('bang')
        .addCString('client_encoding')
        .addCString('UTF8')
        .addCString('')
        .join(true)
    )
  })
  it('builds password message', function () {
    const actual = serialize.password('!')
    assert.deepEqual(actual, new BufferList().addCString('!').join(true, 'p'))
  })
  it('builds request ssl message', function () {
    const actual = serialize.requestSsl()
    const expected = new BufferList().addInt32(80877103).join(true)
    assert.deepEqual(actual, expected)
  })
  it('builds SASLInitialResponseMessage message', function () {
    const actual = serialize.sendSASLInitialResponseMessage('mech', 'data')
    assert.deepEqual(actual, new BufferList().addCString('mech').addInt32(4).addString('data').join(true, 'p'))
  })
  it('builds SCRAMClientFinalMessage message', function () {
    const actual = serialize.sendSCRAMClientFinalMessage('data')
    assert.deepEqual(actual, new BufferList().addString('data').join(true, 'p'))
  })
  it('builds query message', function () {
    const txt = 'select * from boom'
    const actual = serialize.query(txt)
    assert.deepEqual(actual, new BufferList().addCString(txt).join(true, 'Q'))
  })
  describe('parse message', () => {
    it('builds parse message', function () {
      const actual = serialize.parse({ text: '!' })
      const expected = new BufferList().addCString('').addCString('!').addInt16(0).join(true, 'P')
      assert.deepEqual(actual, expected)
    })
    it('builds parse message with named query', function () {
      const actual = serialize.parse({
        name: 'boom',
        text: 'select * from boom',
        types: [],
      })
      const expected = new BufferList().addCString('boom').addCString('select * from boom').addInt16(0).join(true, 'P')
      assert.deepEqual(actual, expected)
    })
    it('with multiple parameters', function () {
      const actual = serialize.parse({
        name: 'force',
        text: 'select * from bang where name = $1',
        types: [1, 2, 3, 4],
      })
      const expected = new BufferList()
        .addCString('force')
        .addCString('select * from bang where name = $1')
        .addInt16(4)
        .addInt32(1)
        .addInt32(2)
        .addInt32(3)
        .addInt32(4)
        .join(true, 'P')
      assert.deepEqual(actual, expected)
    })
  })
  describe('bind messages', function () {
    it('with no values', function () {
      const actual = serialize.bind()
      const expectedBuffer = new BufferList()
        .addCString('')
        .addCString('')
        .addInt16(0)
        .addInt16(0)
        .addInt16(1)
        .addInt16(0)
        .join(true, 'B')
      assert.deepEqual(actual, expectedBuffer)
    })
    it('with named statement, portal, and values', function () {
      const actual = serialize.bind({
        portal: 'bang',
        statement: 'woo',
        values: ['1', 'hi', null, 'zing'],
      })
      const expectedBuffer = new BufferList()
        .addCString('bang') // portal name
        .addCString('woo') // statement name
        .addInt16(4)
        .addInt16(0)
        .addInt16(0)
        .addInt16(0)
        .addInt16(0)
        .addInt16(4)
        .addInt32(1)
        .add(Buffer.from('1'))
        .addInt32(2)
        .add(Buffer.from('hi'))
        .addInt32(-1)
        .addInt32(4)
        .add(Buffer.from('zing'))
        .addInt16(1)
        .addInt16(0)
        .join(true, 'B')
      assert.deepEqual(actual, expectedBuffer)
    })
  })
  // NOTE(review): this case (and the buffer-value case below) sits outside the
  // 'bind messages' describe above — likely intended to be nested inside it.
  it('with custom valueMapper', function () {
    const actual = serialize.bind({
      portal: 'bang',
      statement: 'woo',
      values: ['1', 'hi', null, 'zing'],
      valueMapper: () => null,
    })
    const expectedBuffer = new BufferList()
      .addCString('bang') // portal name
      .addCString('woo') // statement name
      .addInt16(4)
      .addInt16(0)
      .addInt16(0)
      .addInt16(0)
      .addInt16(0)
      .addInt16(4)
      .addInt32(-1)
      .addInt32(-1)
      .addInt32(-1)
      .addInt32(-1)
      .addInt16(1)
      .addInt16(0)
      .join(true, 'B')
    assert.deepEqual(actual, expectedBuffer)
  })
  it('with named statement, portal, and buffer value', function () {
    const actual = serialize.bind({
      portal: 'bang',
      statement: 'woo',
      values: ['1', 'hi', null, Buffer.from('zing', 'utf8')],
    })
    const expectedBuffer = new BufferList()
      .addCString('bang') // portal name
      .addCString('woo') // statement name
      .addInt16(4) // value count
      .addInt16(0) // string
      .addInt16(0) // string
      .addInt16(0) // string
      .addInt16(1) // binary
      .addInt16(4)
      .addInt32(1)
      .add(Buffer.from('1'))
      .addInt32(2)
      .add(Buffer.from('hi'))
      .addInt32(-1)
      .addInt32(4)
      .add(Buffer.from('zing', 'utf-8'))
      .addInt16(1)
      .addInt16(0)
      .join(true, 'B')
    assert.deepEqual(actual, expectedBuffer)
  })
  describe('builds execute message', function () {
    it('for unamed portal with no row limit', function () {
      const actual = serialize.execute()
      const expectedBuffer = new BufferList().addCString('').addInt32(0).join(true, 'E')
      assert.deepEqual(actual, expectedBuffer)
    })
    it('for named portal with row limit', function () {
      const actual = serialize.execute({
        portal: 'my favorite portal',
        rows: 100,
      })
      const expectedBuffer = new BufferList().addCString('my favorite portal').addInt32(100).join(true, 'E')
      assert.deepEqual(actual, expectedBuffer)
    })
  })
  it('builds flush command', function () {
    const actual = serialize.flush()
    const expected = new BufferList().join(true, 'H')
    assert.deepEqual(actual, expected)
  })
  it('builds sync command', function () {
    const actual = serialize.sync()
    const expected = new BufferList().join(true, 'S')
    assert.deepEqual(actual, expected)
  })
  it('builds end command', function () {
    const actual = serialize.end()
    const expected = Buffer.from([0x58, 0, 0, 0, 4])
    assert.deepEqual(actual, expected)
  })
  describe('builds describe command', function () {
    it('describe statement', function () {
      const actual = serialize.describe({ type: 'S', name: 'bang' })
      const expected = new BufferList().addChar('S').addCString('bang').join(true, 'D')
      assert.deepEqual(actual, expected)
    })
    it('describe unnamed portal', function () {
      const actual = serialize.describe({ type: 'P' })
      const expected = new BufferList().addChar('P').addCString('').join(true, 'D')
      assert.deepEqual(actual, expected)
    })
  })
  describe('builds close command', function () {
    it('describe statement', function () {
      const actual = serialize.close({ type: 'S', name: 'bang' })
      const expected = new BufferList().addChar('S').addCString('bang').join(true, 'C')
      assert.deepEqual(actual, expected)
    })
    it('describe unnamed portal', function () {
      const actual = serialize.close({ type: 'P' })
      const expected = new BufferList().addChar('P').addCString('').join(true, 'C')
      assert.deepEqual(actual, expected)
    })
  })
  describe('copy messages', function () {
    it('builds copyFromChunk', () => {
      const actual = serialize.copyData(Buffer.from([1, 2, 3]))
      const expected = new BufferList().add(Buffer.from([1, 2, 3])).join(true, 'd')
      assert.deepEqual(actual, expected)
    })
    it('builds copy fail', () => {
      const actual = serialize.copyFail('err!')
      const expected = new BufferList().addCString('err!').join(true, 'f')
      assert.deepEqual(actual, expected)
    })
    it('builds copy done', () => {
      const actual = serialize.copyDone()
      const expected = new BufferList().join(true, 'c')
      assert.deepEqual(actual, expected)
    })
  })
  it('builds cancel message', () => {
    // 1234/5678 is the fixed CancelRequest code split across two int16s.
    const actual = serialize.cancel(3, 4)
    const expected = new BufferList().addInt16(1234).addInt16(5678).addInt32(3).addInt32(4).join(true)
    assert.deepEqual(actual, expected)
  })
})

389
node_modules/pg-protocol/src/parser.ts generated vendored Normal file
View File

@ -0,0 +1,389 @@
import { TransformOptions } from 'stream'
import {
Mode,
bindComplete,
parseComplete,
closeComplete,
noData,
portalSuspended,
copyDone,
replicationStart,
emptyQuery,
ReadyForQueryMessage,
CommandCompleteMessage,
CopyDataMessage,
CopyResponse,
NotificationResponseMessage,
RowDescriptionMessage,
ParameterDescriptionMessage,
Field,
DataRowMessage,
ParameterStatusMessage,
BackendKeyDataMessage,
DatabaseError,
BackendMessage,
MessageName,
AuthenticationMD5Password,
NoticeMessage,
} from './messages'
import { BufferReader } from './buffer-reader'
// every message is prefixed with a single byte
const CODE_LENGTH = 1
// every message has an int32 length which includes itself but does
// NOT include the code in the length
const LEN_LENGTH = 4
const HEADER_LENGTH = CODE_LENGTH + LEN_LENGTH
// A raw frame: the one-byte type code plus its payload bytes.
export type Packet = {
  code: number
  packet: Buffer
}
// Shared zero-length buffer used whenever nothing is carried over.
const emptyBuffer = Buffer.allocUnsafe(0)
type StreamOptions = TransformOptions & {
  mode: Mode
}
// One-byte backend message type codes (value is the ASCII of the code char).
const enum MessageCodes {
  DataRow = 0x44, // D
  ParseComplete = 0x31, // 1
  BindComplete = 0x32, // 2
  CloseComplete = 0x33, // 3
  CommandComplete = 0x43, // C
  ReadyForQuery = 0x5a, // Z
  NoData = 0x6e, // n
  NotificationResponse = 0x41, // A
  AuthenticationResponse = 0x52, // R
  ParameterStatus = 0x53, // S
  BackendKeyData = 0x4b, // K
  ErrorMessage = 0x45, // E
  NoticeMessage = 0x4e, // N
  RowDescriptionMessage = 0x54, // T
  ParameterDescriptionMessage = 0x74, // t
  PortalSuspended = 0x73, // s
  ReplicationStart = 0x57, // W
  EmptyQuery = 0x49, // I
  CopyIn = 0x47, // G
  CopyOut = 0x48, // H
  CopyDone = 0x63, // c
  CopyData = 0x64, // d
}
// Invoked once per fully decoded backend message.
export type MessageCallback = (msg: BackendMessage) => void
// Incremental decoder for the backend half of the protocol: feed it arbitrary
// TCP chunks via parse() and it emits complete messages, buffering any
// incomplete trailing bytes between calls.
export class Parser {
  // Carry-over bytes from previous parse() calls, with the live window
  // described by bufferOffset (start) and bufferLength (count).
  private buffer: Buffer = emptyBuffer
  private bufferLength: number = 0
  private bufferOffset: number = 0
  private reader = new BufferReader()
  private mode: Mode
  // Only text mode is implemented; binary is rejected up front.
  constructor(opts?: StreamOptions) {
    if (opts?.mode === 'binary') {
      throw new Error('Binary mode not supported yet')
    }
    this.mode = opts?.mode || 'text'
  }
  // Consume one chunk, invoking callback once per complete message found.
  public parse(buffer: Buffer, callback: MessageCallback) {
    this.mergeBuffer(buffer)
    const bufferFullLength = this.bufferOffset + this.bufferLength
    let offset = this.bufferOffset
    while (offset + HEADER_LENGTH <= bufferFullLength) {
      // code is 1 byte long - it identifies the message type
      const code = this.buffer[offset]
      // length is 1 Uint32BE - it is the length of the message EXCLUDING the code
      const length = this.buffer.readUInt32BE(offset + CODE_LENGTH)
      const fullMessageLength = CODE_LENGTH + length
      if (fullMessageLength + offset <= bufferFullLength) {
        const message = this.handlePacket(offset + HEADER_LENGTH, code, length, this.buffer)
        callback(message)
        offset += fullMessageLength
      } else {
        break
      }
    }
    if (offset === bufferFullLength) {
      // No more use for the buffer
      this.buffer = emptyBuffer
      this.bufferLength = 0
      this.bufferOffset = 0
    } else {
      // Adjust the cursors of remainingBuffer
      this.bufferLength = bufferFullLength - offset
      this.bufferOffset = offset
    }
  }
  // Append `buffer` after any carried-over bytes, compacting or growing the
  // internal buffer (doubling its size) only when it cannot fit in place.
  private mergeBuffer(buffer: Buffer): void {
    if (this.bufferLength > 0) {
      const newLength = this.bufferLength + buffer.byteLength
      const newFullLength = newLength + this.bufferOffset
      if (newFullLength > this.buffer.byteLength) {
        // We can't concat the new buffer with the remaining one
        let newBuffer: Buffer
        if (newLength <= this.buffer.byteLength && this.bufferOffset >= this.bufferLength) {
          // We can move the relevant part to the beginning of the buffer instead of allocating a new buffer
          newBuffer = this.buffer
        } else {
          // Allocate a new larger buffer
          let newBufferLength = this.buffer.byteLength * 2
          while (newLength >= newBufferLength) {
            newBufferLength *= 2
          }
          newBuffer = Buffer.allocUnsafe(newBufferLength)
        }
        // Move the remaining buffer to the new one
        this.buffer.copy(newBuffer, 0, this.bufferOffset, this.bufferOffset + this.bufferLength)
        this.buffer = newBuffer
        this.bufferOffset = 0
      }
      // Concat the new buffer with the remaining one
      buffer.copy(this.buffer, this.bufferOffset + this.bufferLength)
      this.bufferLength = newLength
    } else {
      this.buffer = buffer
      this.bufferOffset = 0
      this.bufferLength = buffer.byteLength
    }
  }
  // Dispatch on the one-byte code; unknown codes yield a DatabaseError rather
  // than throwing, so the stream keeps flowing.
  private handlePacket(offset: number, code: number, length: number, bytes: Buffer): BackendMessage {
    switch (code) {
      case MessageCodes.BindComplete:
        return bindComplete
      case MessageCodes.ParseComplete:
        return parseComplete
      case MessageCodes.CloseComplete:
        return closeComplete
      case MessageCodes.NoData:
        return noData
      case MessageCodes.PortalSuspended:
        return portalSuspended
      case MessageCodes.CopyDone:
        return copyDone
      case MessageCodes.ReplicationStart:
        return replicationStart
      case MessageCodes.EmptyQuery:
        return emptyQuery
      case MessageCodes.DataRow:
        return this.parseDataRowMessage(offset, length, bytes)
      case MessageCodes.CommandComplete:
        return this.parseCommandCompleteMessage(offset, length, bytes)
      case MessageCodes.ReadyForQuery:
        return this.parseReadyForQueryMessage(offset, length, bytes)
      case MessageCodes.NotificationResponse:
        return this.parseNotificationMessage(offset, length, bytes)
      case MessageCodes.AuthenticationResponse:
        return this.parseAuthenticationResponse(offset, length, bytes)
      case MessageCodes.ParameterStatus:
        return this.parseParameterStatusMessage(offset, length, bytes)
      case MessageCodes.BackendKeyData:
        return this.parseBackendKeyData(offset, length, bytes)
      case MessageCodes.ErrorMessage:
        return this.parseErrorMessage(offset, length, bytes, 'error')
      case MessageCodes.NoticeMessage:
        return this.parseErrorMessage(offset, length, bytes, 'notice')
      case MessageCodes.RowDescriptionMessage:
        return this.parseRowDescriptionMessage(offset, length, bytes)
      case MessageCodes.ParameterDescriptionMessage:
        return this.parseParameterDescriptionMessage(offset, length, bytes)
      case MessageCodes.CopyIn:
        return this.parseCopyInMessage(offset, length, bytes)
      case MessageCodes.CopyOut:
        return this.parseCopyOutMessage(offset, length, bytes)
      case MessageCodes.CopyData:
        return this.parseCopyData(offset, length, bytes)
      default:
        return new DatabaseError('received invalid response: ' + code.toString(16), length, 'error')
    }
  }
  // 'Z': single status byte (transaction state).
  private parseReadyForQueryMessage(offset: number, length: number, bytes: Buffer) {
    this.reader.setBuffer(offset, bytes)
    const status = this.reader.string(1)
    return new ReadyForQueryMessage(length, status)
  }
  // 'C': null-terminated command tag.
  private parseCommandCompleteMessage(offset: number, length: number, bytes: Buffer) {
    this.reader.setBuffer(offset, bytes)
    const text = this.reader.cstring()
    return new CommandCompleteMessage(length, text)
  }
  // 'd': payload is everything after the 4 length bytes.
  private parseCopyData(offset: number, length: number, bytes: Buffer) {
    const chunk = bytes.slice(offset, offset + (length - 4))
    return new CopyDataMessage(length, chunk)
  }
  private parseCopyInMessage(offset: number, length: number, bytes: Buffer) {
    return this.parseCopyMessage(offset, length, bytes, 'copyInResponse')
  }
  private parseCopyOutMessage(offset: number, length: number, bytes: Buffer) {
    return this.parseCopyMessage(offset, length, bytes, 'copyOutResponse')
  }
  // 'G'/'H': format flag byte, int16 column count, then per-column formats.
  private parseCopyMessage(offset: number, length: number, bytes: Buffer, messageName: MessageName) {
    this.reader.setBuffer(offset, bytes)
    const isBinary = this.reader.byte() !== 0
    const columnCount = this.reader.int16()
    const message = new CopyResponse(length, messageName, isBinary, columnCount)
    for (let i = 0; i < columnCount; i++) {
      message.columnTypes[i] = this.reader.int16()
    }
    return message
  }
  // 'A': notifying backend PID, channel name, and payload string.
  private parseNotificationMessage(offset: number, length: number, bytes: Buffer) {
    this.reader.setBuffer(offset, bytes)
    const processId = this.reader.int32()
    const channel = this.reader.cstring()
    const payload = this.reader.cstring()
    return new NotificationResponseMessage(length, processId, channel, payload)
  }
  // 'T': int16 field count followed by one Field descriptor per column.
  private parseRowDescriptionMessage(offset: number, length: number, bytes: Buffer) {
    this.reader.setBuffer(offset, bytes)
    const fieldCount = this.reader.int16()
    const message = new RowDescriptionMessage(length, fieldCount)
    for (let i = 0; i < fieldCount; i++) {
      message.fields[i] = this.parseField()
    }
    return message
  }
  // Reads one column descriptor at the reader's current position.
  private parseField(): Field {
    const name = this.reader.cstring()
    const tableID = this.reader.uint32()
    const columnID = this.reader.int16()
    const dataTypeID = this.reader.uint32()
    const dataTypeSize = this.reader.int16()
    const dataTypeModifier = this.reader.int32()
    const mode = this.reader.int16() === 0 ? 'text' : 'binary'
    return new Field(name, tableID, columnID, dataTypeID, dataTypeSize, dataTypeModifier, mode)
  }
  // 't': int16 parameter count followed by one type OID per parameter.
  private parseParameterDescriptionMessage(offset: number, length: number, bytes: Buffer) {
    this.reader.setBuffer(offset, bytes)
    const parameterCount = this.reader.int16()
    const message = new ParameterDescriptionMessage(length, parameterCount)
    for (let i = 0; i < parameterCount; i++) {
      message.dataTypeIDs[i] = this.reader.int32()
    }
    return message
  }
  // 'D': int16 field count, then length-prefixed values (text mode only here).
  private parseDataRowMessage(offset: number, length: number, bytes: Buffer) {
    this.reader.setBuffer(offset, bytes)
    const fieldCount = this.reader.int16()
    const fields: any[] = new Array(fieldCount)
    for (let i = 0; i < fieldCount; i++) {
      const len = this.reader.int32()
      // a -1 for length means the value of the field is null
      fields[i] = len === -1 ? null : this.reader.string(len)
    }
    return new DataRowMessage(length, fields)
  }
  // 'S': run-time parameter name/value pair.
  private parseParameterStatusMessage(offset: number, length: number, bytes: Buffer) {
    this.reader.setBuffer(offset, bytes)
    const name = this.reader.cstring()
    const value = this.reader.cstring()
    return new ParameterStatusMessage(length, name, value)
  }
  // 'K': cancellation key data for this connection.
  private parseBackendKeyData(offset: number, length: number, bytes: Buffer) {
    this.reader.setBuffer(offset, bytes)
    const processID = this.reader.int32()
    const secretKey = this.reader.int32()
    return new BackendKeyDataMessage(length, processID, secretKey)
  }
  // 'R': dispatch on the int32 auth subtype; unknown subtypes throw.
  public parseAuthenticationResponse(offset: number, length: number, bytes: Buffer) {
    this.reader.setBuffer(offset, bytes)
    const code = this.reader.int32()
    // TODO(bmc): maybe better types here
    const message: BackendMessage & any = {
      name: 'authenticationOk',
      length,
    }
    switch (code) {
      case 0: // AuthenticationOk
        break
      case 3: // AuthenticationCleartextPassword
        if (message.length === 8) {
          message.name = 'authenticationCleartextPassword'
        }
        break
      case 5: // AuthenticationMD5Password
        if (message.length === 12) {
          message.name = 'authenticationMD5Password'
          const salt = this.reader.bytes(4)
          return new AuthenticationMD5Password(length, salt)
        }
        break
      case 10: // AuthenticationSASL
        {
          message.name = 'authenticationSASL'
          message.mechanisms = []
          let mechanism: string
          // Mechanism list is terminated by an empty cstring.
          do {
            mechanism = this.reader.cstring()
            if (mechanism) {
              message.mechanisms.push(mechanism)
            }
          } while (mechanism)
        }
        break
      case 11: // AuthenticationSASLContinue
        message.name = 'authenticationSASLContinue'
        message.data = this.reader.string(length - 8)
        break
      case 12: // AuthenticationSASLFinal
        message.name = 'authenticationSASLFinal'
        message.data = this.reader.string(length - 8)
        break
      default:
        throw new Error('Unknown authenticationOk message type ' + code)
    }
    return message
  }
  // 'E'/'N': a sequence of single-letter-tagged cstrings terminated by a NUL
  // byte; field 'M' is the human-readable message.
  private parseErrorMessage(offset: number, length: number, bytes: Buffer, name: MessageName) {
    this.reader.setBuffer(offset, bytes)
    const fields: Record<string, string> = {}
    let fieldType = this.reader.string(1)
    while (fieldType !== '\0') {
      fields[fieldType] = this.reader.cstring()
      fieldType = this.reader.string(1)
    }
    const messageValue = fields.M
    const message =
      name === 'notice' ? new NoticeMessage(length, messageValue) : new DatabaseError(messageValue, length, name)
    message.severity = fields.S
    message.code = fields.C
    message.detail = fields.D
    message.hint = fields.H
    message.position = fields.P
    message.internalPosition = fields.p
    message.internalQuery = fields.q
    message.where = fields.W
    message.schema = fields.s
    message.table = fields.t
    message.column = fields.c
    message.dataType = fields.d
    message.constraint = fields.n
    message.file = fields.F
    message.line = fields.L
    message.routine = fields.R
    return message
  }
}

274
node_modules/pg-protocol/src/serializer.ts generated vendored Normal file
View File

@ -0,0 +1,274 @@
import { Writer } from './buffer-writer'
// One-byte frontend message type codes (ASCII of the code char).
// NOTE(review): `startup` (0x70 = 'p') is actually the password/SASL-response
// code; the real startup packet is sent with no code at all (see startup()
// below). The naming is a long-standing upstream quirk — confirm before renaming.
const enum code {
  startup = 0x70,
  query = 0x51,
  parse = 0x50,
  bind = 0x42,
  execute = 0x45,
  flush = 0x48,
  sync = 0x53,
  end = 0x58,
  close = 0x43,
  describe = 0x44,
  copyFromChunk = 0x64,
  copyDone = 0x63,
  copyFail = 0x66,
}
// Shared writer reused by all builders in this module (flush() resets it).
const writer = new Writer()
/**
 * Build the StartupMessage: protocol version 3.0, then "key\0value\0" pairs,
 * a forced client_encoding=UTF8 pair, and a terminating NUL.
 */
const startup = (opts: Record<string, string>): Buffer => {
  // protocol version
  writer.addInt16(3).addInt16(0)
  for (const [key, value] of Object.entries(opts)) {
    writer.addCString(key).addCString(value)
  }
  writer.addCString('client_encoding').addCString('UTF8')
  const bodyBuffer = writer.addCString('').flush()
  // this message is sent without a code; its int32 length prefix counts itself
  const length = bodyBuffer.length + 4
  return new Writer().addInt32(length).add(bodyBuffer).flush()
}
/** Build the SSLRequest packet: int32 length (8) plus the magic code 80877103. */
const requestSsl = (): Buffer => {
  const packet = Buffer.allocUnsafe(8)
  packet.writeInt32BE(8, 0)
  packet.writeInt32BE(80877103, 4)
  return packet
}
/** Build a PasswordMessage ('p'): the cleartext/MD5 password as a cstring. */
const password = (secret: string): Buffer => writer.addCString(secret).flush(code.startup)
/** Build a SASLInitialResponse: mechanism name, int32 byte length, then the data (sent under code 'p'). */
const sendSASLInitialResponseMessage = (mechanism: string, initialResponse: string): Buffer => {
  const responseLength = Buffer.byteLength(initialResponse)
  writer.addCString(mechanism).addInt32(responseLength).addString(initialResponse)
  return writer.flush(code.startup)
}
/** Build a SASLResponse carrying the SCRAM client-final message (code 'p'). */
const sendSCRAMClientFinalMessage = (additionalData: string): Buffer =>
  writer.addString(additionalData).flush(code.startup)
/** Build a simple Query ('Q') message: the SQL text as a cstring. */
const query = (sql: string): Buffer => writer.addCString(sql).flush(code.query)
// Options for the Parse ('P') message builder.
type ParseOpts = {
  name?: string
  types?: number[]
  text: string
}
// Shared immutable fallback to avoid allocating per call.
const emptyArray: any[] = []
// Build a Parse ('P') message: statement name, query text, then the declared
// parameter type OIDs.
const parse = (query: ParseOpts): Buffer => {
  // expect something like this:
  // { name: 'queryName',
  //   text: 'select * from blah',
  //   types: ['int8', 'bool'] }
  // normalize missing query names to allow for null
  const name = query.name || ''
  // Postgres truncates identifiers beyond NAMEDATALEN-1 (63) characters.
  if (name.length > 63) {
    console.error('Warning! Postgres only supports 63 characters for query names.')
    console.error('You supplied %s (%s)', name, name.length)
    console.error('This can cause conflicts and silent errors executing queries')
  }
  const types = query.types || emptyArray
  const len = types.length
  const buffer = writer
    .addCString(name) // name of query
    .addCString(query.text) // actual query text
    .addInt16(len)
  for (let i = 0; i < len; i++) {
    buffer.addInt32(types[i])
  }
  return writer.flush(code.parse)
}
// Maps a JS parameter value to its wire representation before serialization.
type ValueMapper = (param: any, index: number) => any
// Options for the Bind ('B') message builder.
type BindOpts = {
  portal?: string
  binary?: boolean
  statement?: string
  values?: any[]
  // optional map from JS value to postgres value per parameter
  valueMapper?: ValueMapper
}
// Secondary writer: parameter values are accumulated here while their format
// codes go into `writer`, then the two are concatenated in bind().
const paramWriter = new Writer()
// make this a const enum so typescript will inline the value
const enum ParamType {
  STRING = 0,
  BINARY = 1,
}
// For each value, append its format code (int16) to `writer` and its
// length-prefixed bytes to `paramWriter`; null/undefined becomes length -1.
const writeValues = function (values: any[], valueMapper?: ValueMapper): void {
  for (let i = 0; i < values.length; i++) {
    const mappedVal = valueMapper ? valueMapper(values[i], i) : values[i]
    if (mappedVal == null) {
      // add the param type (string) to the writer
      writer.addInt16(ParamType.STRING)
      // write -1 to the param writer to indicate null
      paramWriter.addInt32(-1)
    } else if (mappedVal instanceof Buffer) {
      // add the param type (binary) to the writer
      writer.addInt16(ParamType.BINARY)
      // add the buffer to the param writer
      paramWriter.addInt32(mappedVal.length)
      paramWriter.add(mappedVal)
    } else {
      // add the param type (string) to the writer
      writer.addInt16(ParamType.STRING)
      paramWriter.addInt32(Buffer.byteLength(mappedVal))
      paramWriter.addString(mappedVal)
    }
  }
}
// Build a Bind ('B') message: binds `values` to a prepared `statement`
// under the named `portal`. Parameter format codes are emitted by
// writeValues(); `binary` selects the single result-column format code.
const bind = (config: BindOpts = {}): Buffer => {
  // normalize config
  const portal = config.portal || ''
  const statement = config.statement || ''
  const binary = config.binary || false
  const values = config.values || emptyArray
  const len = values.length
  writer.addCString(portal).addCString(statement)
  // parameter format-code count; writeValues appends one code per value
  writer.addInt16(len)
  writeValues(values, config.valueMapper)
  // parameter value count, then the length-prefixed values themselves
  writer.addInt16(len)
  writer.add(paramWriter.flush())
  // all results use the same format code
  writer.addInt16(1)
  // format code
  writer.addInt16(binary ? ParamType.BINARY : ParamType.STRING)
  return writer.flush(code.bind)
}
// Options accepted by execute().
type ExecOpts = {
  portal?: string
  rows?: number
}
// Pre-built Execute ('E') for the common case: unnamed portal, no row limit.
// Bytes: code, Int32 length = 9, empty CString portal, Int32 rows = 0.
const emptyExecute = Buffer.from([code.execute, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x00, 0x00])
// Build an Execute ('E') message for `portal`, limited to `rows` (0 = all).
const execute = (config?: ExecOpts): Buffer => {
  // this is the happy path for most queries
  if (!config || (!config.portal && !config.rows)) {
    return emptyExecute
  }
  const portal = config.portal || ''
  const rows = config.rows || 0
  const portalLength = Buffer.byteLength(portal)
  // Int32 length field + portal CString + Int32 row limit
  const len = 4 + portalLength + 1 + 4
  // one extra byte for the message code
  const buff = Buffer.allocUnsafe(1 + len)
  buff[0] = code.execute
  buff.writeInt32BE(len, 1)
  buff.write(portal, 5, 'utf-8')
  buff[portalLength + 5] = 0 // null terminate portal cString
  buff.writeUInt32BE(rows, buff.length - 4)
  return buff
}
// Build a CancelRequest packet (sent on a fresh connection; it has no
// message-code byte). Layout: Int32 length (16) | Int16 1234 + Int16 5678
// (the cancel request code) | Int32 backend PID | Int32 secret key.
const cancel = (processID: number, secretKey: number): Buffer => {
  const msg = Buffer.allocUnsafe(16)
  msg.writeInt32BE(16, 0)
  msg.writeInt16BE(1234, 4)
  msg.writeInt16BE(5678, 6)
  msg.writeInt32BE(processID, 8)
  msg.writeInt32BE(secretKey, 12)
  return msg
}
// Target of a Describe/Close: 'S' = prepared statement, 'P' = portal.
type PortalOpts = {
  type: 'S' | 'P'
  name?: string
}

// Generic helper for messages whose body is one null-terminated string.
// Wire layout: Byte1 code | Int32 length (self-inclusive) | CString payload.
const cstringMessage = (code: code, string: string): Buffer => {
  const payloadLen = Buffer.byteLength(string)
  // the length field counts itself (4), the payload, and the terminator
  const frameLen = 4 + payloadLen + 1
  // plus one leading byte for the message code
  const out = Buffer.allocUnsafe(1 + frameLen)
  out[0] = code
  out.writeInt32BE(frameLen, 1)
  out.write(string, 5, 'utf-8')
  out[frameLen] = 0 // null terminator is the final byte
  return out
}
// Pre-built Describe ('D') messages for the unnamed portal/statement.
const emptyDescribePortal = writer.addCString('P').flush(code.describe)
const emptyDescribeStatement = writer.addCString('S').flush(code.describe)
// Build a Describe ('D') message; body is the type byte ('S'/'P') + name.
// Falls back to the cached unnamed-target buffers when no name is given.
const describe = (msg: PortalOpts): Buffer => {
  return msg.name
    ? cstringMessage(code.describe, `${msg.type}${msg.name || ''}`)
    : msg.type === 'P'
      ? emptyDescribePortal
      : emptyDescribeStatement
}
// Build a Close ('C') message for the named portal or statement.
const close = (msg: PortalOpts): Buffer => {
  const text = `${msg.type}${msg.name || ''}`
  return cstringMessage(code.close, text)
}
// Build a CopyData ('d') message carrying one raw chunk.
const copyData = (chunk: Buffer): Buffer => {
  return writer.add(chunk).flush(code.copyFromChunk)
}
// Build a CopyFail ('f') message with a human-readable reason.
const copyFail = (message: string): Buffer => {
  return cstringMessage(code.copyFail, message)
}
// A body-less message: the code byte plus a self-inclusive length of 4.
const codeOnlyBuffer = (code: code): Buffer => Buffer.from([code, 0x00, 0x00, 0x00, 0x04])
// Body-less messages are immutable, so each is built once and reused.
const flushBuffer = codeOnlyBuffer(code.flush)
const syncBuffer = codeOnlyBuffer(code.sync)
const endBuffer = codeOnlyBuffer(code.end)
const copyDoneBuffer = codeOnlyBuffer(code.copyDone)
// Public serializer API: each entry builds one frontend protocol message.
const serialize = {
  startup,
  password,
  requestSsl,
  sendSASLInitialResponseMessage,
  sendSCRAMClientFinalMessage,
  query,
  parse,
  bind,
  execute,
  describe,
  close,
  flush: () => flushBuffer,
  sync: () => syncBuffer,
  end: () => endBuffer,
  copyData,
  copyDone: () => copyDoneBuffer,
  copyFail,
  cancel,
}
export { serialize }

67
node_modules/pg-protocol/src/testing/buffer-list.ts generated vendored Normal file
View File

@ -0,0 +1,67 @@
/**
 * Test helper: accumulates Buffers and joins them into a single
 * postgres-style message (optional leading type char + self-inclusive
 * Int32 length prefix).
 */
export default class BufferList {
  constructor(public buffers: Buffer[] = []) {}
  // Append (or prepend, when `front` is set) a raw buffer.
  public add(buffer: Buffer, front?: boolean) {
    this.buffers[front ? 'unshift' : 'push'](buffer)
    return this
  }
  // Append a big-endian 16-bit integer (Buffer.from masks each value to a byte).
  public addInt16(val: number, front?: boolean) {
    return this.add(Buffer.from([val >>> 8, val >>> 0]), front)
  }
  // Total byte length of everything accumulated so far.
  public getByteLength() {
    return this.buffers.reduce(function (previous, current) {
      return previous + current.length
    }, 0)
  }
  // Append a big-endian 32-bit integer.
  public addInt32(val: number, first?: boolean) {
    return this.add(
      Buffer.from([(val >>> 24) & 0xff, (val >>> 16) & 0xff, (val >>> 8) & 0xff, (val >>> 0) & 0xff]),
      first
    )
  }
  // Append a null-terminated UTF-8 string.
  public addCString(val: string, front?: boolean) {
    const len = Buffer.byteLength(val)
    const buffer = Buffer.alloc(len + 1)
    buffer.write(val)
    buffer[len] = 0
    return this.add(buffer, front)
  }
  // Append a UTF-8 string without a terminator.
  public addString(val: string, front?: boolean) {
    const len = Buffer.byteLength(val)
    const buffer = Buffer.alloc(len)
    buffer.write(val)
    return this.add(buffer, front)
  }
  // Append a single character.
  public addChar(char: string, first?: boolean) {
    return this.add(Buffer.from(char, 'utf8'), first)
  }
  // Append a single raw byte.
  public addByte(byte: number) {
    return this.add(Buffer.from([byte]))
  }
  /**
   * Concatenate everything into one Buffer. When `appendLength` is set, a
   * self-inclusive Int32 length is prepended first; the optional
   * message-type `char` is then prepended outside the counted length.
   */
  public join(appendLength?: boolean, char?: string): Buffer {
    let length = this.getByteLength()
    if (appendLength) {
      this.addInt32(length + 4, true)
      return this.join(false, char)
    }
    if (char) {
      this.addChar(char, true)
      length++
    }
    const result = Buffer.alloc(length)
    let index = 0
    this.buffers.forEach(function (buffer) {
      buffer.copy(result, index, 0)
      index += buffer.length
    })
    return result
  }
}

166
node_modules/pg-protocol/src/testing/test-buffers.ts generated vendored Normal file
View File

@ -0,0 +1,166 @@
// https://www.postgresql.org/docs/current/protocol-message-formats.html
import BufferList from './buffer-list'
// Test helpers that build backend (server -> client) protocol messages.
// Each builder returns a finished Buffer; the second join() argument is
// the one-character message-type code.
const buffers = {
  // ReadyForQuery ('Z') with transaction status 'I' (idle).
  readyForQuery: function () {
    return new BufferList().add(Buffer.from('I')).join(true, 'Z')
  },
  // Authentication ('R') variants, distinguished by the leading Int32.
  authenticationOk: function () {
    return new BufferList().addInt32(0).join(true, 'R')
  },
  authenticationCleartextPassword: function () {
    return new BufferList().addInt32(3).join(true, 'R')
  },
  authenticationMD5Password: function () {
    return new BufferList()
      .addInt32(5)
      .add(Buffer.from([1, 2, 3, 4]))
      .join(true, 'R')
  },
  authenticationSASL: function () {
    return new BufferList().addInt32(10).addCString('SCRAM-SHA-256').addCString('').join(true, 'R')
  },
  authenticationSASLContinue: function () {
    return new BufferList().addInt32(11).addString('data').join(true, 'R')
  },
  authenticationSASLFinal: function () {
    return new BufferList().addInt32(12).addString('data').join(true, 'R')
  },
  // ParameterStatus ('S'): name/value pair.
  parameterStatus: function (name: string, value: string) {
    return new BufferList().addCString(name).addCString(value).join(true, 'S')
  },
  // BackendKeyData ('K'): backend PID + cancel secret.
  backendKeyData: function (processID: number, secretKey: number) {
    return new BufferList().addInt32(processID).addInt32(secretKey).join(true, 'K')
  },
  // CommandComplete ('C') with the given command tag.
  commandComplete: function (string: string) {
    return new BufferList().addCString(string).join(true, 'C')
  },
  // RowDescription ('T'): field count then per-field metadata.
  rowDescription: function (fields: any[]) {
    fields = fields || []
    const buf = new BufferList()
    buf.addInt16(fields.length)
    fields.forEach(function (field) {
      buf
        .addCString(field.name)
        .addInt32(field.tableID || 0)
        .addInt16(field.attributeNumber || 0)
        .addInt32(field.dataTypeID || 0)
        .addInt16(field.dataTypeSize || 0)
        .addInt32(field.typeModifier || 0)
        .addInt16(field.formatCode || 0)
    })
    return buf.join(true, 'T')
  },
  // ParameterDescription ('t'): one OID per statement parameter.
  parameterDescription: function (dataTypeIDs: number[]) {
    dataTypeIDs = dataTypeIDs || []
    const buf = new BufferList()
    buf.addInt16(dataTypeIDs.length)
    dataTypeIDs.forEach(function (dataTypeID) {
      buf.addInt32(dataTypeID)
    })
    return buf.join(true, 't')
  },
  // DataRow ('D'): column count, then -1 for NULL or length-prefixed text.
  dataRow: function (columns: any[]) {
    columns = columns || []
    const buf = new BufferList()
    buf.addInt16(columns.length)
    columns.forEach(function (col) {
      if (col == null) {
        buf.addInt32(-1)
      } else {
        const strBuf = Buffer.from(col, 'utf8')
        buf.addInt32(strBuf.length)
        buf.add(strBuf)
      }
    })
    return buf.join(true, 'D')
  },
  // ErrorResponse ('E') / NoticeResponse ('N') share the same field body.
  error: function (fields: any) {
    return buffers.errorOrNotice(fields).join(true, 'E')
  },
  notice: function (fields: any) {
    return buffers.errorOrNotice(fields).join(true, 'N')
  },
  // Field list: one (type char, CString value) pair each, then a 0 byte.
  errorOrNotice: function (fields: any) {
    fields = fields || []
    const buf = new BufferList()
    fields.forEach(function (field: any) {
      buf.addChar(field.type)
      buf.addCString(field.value)
    })
    return buf.add(Buffer.from([0])) // terminator
  },
  parseComplete: function () {
    return new BufferList().join(true, '1')
  },
  bindComplete: function () {
    return new BufferList().join(true, '2')
  },
  // NotificationResponse ('A'): sender PID, channel, payload.
  notification: function (id: number, channel: string, payload: string) {
    return new BufferList().addInt32(id).addCString(channel).addCString(payload).join(true, 'A')
  },
  emptyQuery: function () {
    return new BufferList().join(true, 'I')
  },
  portalSuspended: function () {
    return new BufferList().join(true, 's')
  },
  closeComplete: function () {
    return new BufferList().join(true, '3')
  },
  // CopyInResponse ('G'): overall format byte, column count, per-column codes.
  copyIn: function (cols: number) {
    const list = new BufferList()
      // text mode
      .addByte(0)
      // column count
      .addInt16(cols)
    for (let i = 0; i < cols; i++) {
      list.addInt16(i)
    }
    return list.join(true, 'G')
  },
  // CopyOutResponse ('H'): same shape as CopyInResponse.
  copyOut: function (cols: number) {
    const list = new BufferList()
      // text mode
      .addByte(0)
      // column count
      .addInt16(cols)
    for (let i = 0; i < cols; i++) {
      list.addInt16(i)
    }
    return list.join(true, 'H')
  },
  copyData: function (bytes: Buffer) {
    return new BufferList().add(bytes).join(true, 'd')
  },
  copyDone: function () {
    return new BufferList().join(true, 'c')
  },
}
export default buffers

1
node_modules/pg-protocol/src/types/chunky.d.ts generated vendored Normal file
View File

@ -0,0 +1 @@
declare module 'chunky'

7
node_modules/pg-types/.travis.yml generated vendored Normal file
View File

@ -0,0 +1,7 @@
language: node_js
node_js:
- '4'
- 'lts/*'
- 'node'
env:
- PGUSER=postgres

14
node_modules/pg-types/Makefile generated vendored Normal file
View File

@ -0,0 +1,14 @@
.PHONY: publish-patch test
test:
npm test
patch: test
npm version patch -m "Bump version"
git push origin master --tags
npm publish
minor: test
npm version minor -m "Bump version"
git push origin master --tags
npm publish

75
node_modules/pg-types/README.md generated vendored Normal file
View File

@ -0,0 +1,75 @@
# pg-types
This is the code that turns all the raw text from postgres into JavaScript types for [node-postgres](https://github.com/brianc/node-postgres.git)
## use
This module is consumed and exported from the root `pg` object of node-postgres. To access it, do the following:
```js
var types = require('pg').types
```
Generally what you'll want to do is override how a specific data-type is parsed and turned into a JavaScript type. By default the PostgreSQL backend server returns everything as strings. Every data type corresponds to a unique `OID` within the server, and these `OIDs` are sent back with the query response. So, you need to match a particular `OID` to a function you'd like to use to take the raw text input and produce a valid JavaScript object as a result. `null` values are never parsed.
Let's do something I commonly like to do on projects: return 64-bit integers `(int8)` as JavaScript integers. Because JavaScript doesn't have support for 64-bit integers node-postgres cannot confidently parse `int8` data type results as numbers because if you have a _huge_ number it will overflow and the result you'd get back from node-postgres would not be the result in the database. That would be a __very bad thing__ so node-postgres just returns `int8` results as strings and leaves the parsing up to you. Let's say that you know you don't and won't ever have numbers greater than `int4` in your database, but you're tired of receiving results from the `COUNT(*)` function as strings (because that function returns `int8`). You would do this:
```js
var types = require('pg').types
types.setTypeParser(20, function(val) {
return parseInt(val)
})
```
__boom__: now you get numbers instead of strings.
Just as another example -- not saying this is a good idea -- let's say you want to return all dates from your database as [moment](http://momentjs.com/docs/) objects. Okay, do this:
```js
var types = require('pg').types
var moment = require('moment')
var parseFn = function(val) {
return val === null ? null : moment(val)
}
types.setTypeParser(types.builtins.TIMESTAMPTZ, parseFn)
types.setTypeParser(types.builtins.TIMESTAMP, parseFn)
```
_note: I've never done that with my dates, and I'm not 100% sure moment can parse all the date strings returned from postgres. It's just an example!_
If you're thinking "gee, this seems pretty handy, but how can I get a list of all the OIDs in the database and what they correspond to?!?!?!" worry not:
```bash
$ psql -c "select typname, oid, typarray from pg_type order by oid"
```
If you want to find out the OID of a specific type:
```bash
$ psql -c "select typname, oid, typarray from pg_type where typname = 'daterange' order by oid"
```
:smile:
## license
The MIT License (MIT)
Copyright (c) 2014 Brian M. Carlson
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

137
node_modules/pg-types/index.d.ts generated vendored Normal file
View File

@ -0,0 +1,137 @@
export enum TypeId {
BOOL = 16,
BYTEA = 17,
CHAR = 18,
INT8 = 20,
INT2 = 21,
INT4 = 23,
REGPROC = 24,
TEXT = 25,
OID = 26,
TID = 27,
XID = 28,
CID = 29,
JSON = 114,
XML = 142,
PG_NODE_TREE = 194,
SMGR = 210,
PATH = 602,
POLYGON = 604,
CIDR = 650,
FLOAT4 = 700,
FLOAT8 = 701,
ABSTIME = 702,
RELTIME = 703,
TINTERVAL = 704,
CIRCLE = 718,
MACADDR8 = 774,
MONEY = 790,
MACADDR = 829,
INET = 869,
ACLITEM = 1033,
BPCHAR = 1042,
VARCHAR = 1043,
DATE = 1082,
TIME = 1083,
TIMESTAMP = 1114,
TIMESTAMPTZ = 1184,
INTERVAL = 1186,
TIMETZ = 1266,
BIT = 1560,
VARBIT = 1562,
NUMERIC = 1700,
REFCURSOR = 1790,
REGPROCEDURE = 2202,
REGOPER = 2203,
REGOPERATOR = 2204,
REGCLASS = 2205,
REGTYPE = 2206,
UUID = 2950,
TXID_SNAPSHOT = 2970,
PG_LSN = 3220,
PG_NDISTINCT = 3361,
PG_DEPENDENCIES = 3402,
TSVECTOR = 3614,
TSQUERY = 3615,
GTSVECTOR = 3642,
REGCONFIG = 3734,
REGDICTIONARY = 3769,
JSONB = 3802,
REGNAMESPACE = 4089,
REGROLE = 4096
}
export type builtinsTypes =
'BOOL' |
'BYTEA' |
'CHAR' |
'INT8' |
'INT2' |
'INT4' |
'REGPROC' |
'TEXT' |
'OID' |
'TID' |
'XID' |
'CID' |
'JSON' |
'XML' |
'PG_NODE_TREE' |
'SMGR' |
'PATH' |
'POLYGON' |
'CIDR' |
'FLOAT4' |
'FLOAT8' |
'ABSTIME' |
'RELTIME' |
'TINTERVAL' |
'CIRCLE' |
'MACADDR8' |
'MONEY' |
'MACADDR' |
'INET' |
'ACLITEM' |
'BPCHAR' |
'VARCHAR' |
'DATE' |
'TIME' |
'TIMESTAMP' |
'TIMESTAMPTZ' |
'INTERVAL' |
'TIMETZ' |
'BIT' |
'VARBIT' |
'NUMERIC' |
'REFCURSOR' |
'REGPROCEDURE' |
'REGOPER' |
'REGOPERATOR' |
'REGCLASS' |
'REGTYPE' |
'UUID' |
'TXID_SNAPSHOT' |
'PG_LSN' |
'PG_NDISTINCT' |
'PG_DEPENDENCIES' |
'TSVECTOR' |
'TSQUERY' |
'GTSVECTOR' |
'REGCONFIG' |
'REGDICTIONARY' |
'JSONB' |
'REGNAMESPACE' |
'REGROLE';
export type TypesBuiltins = {[key in builtinsTypes]: TypeId};
export type TypeFormat = 'text' | 'binary';
export const builtins: TypesBuiltins;
export function setTypeParser (id: TypeId, parseFn: ((value: string) => any)): void;
export function setTypeParser (id: TypeId, format: TypeFormat, parseFn: (value: string) => any): void;
export const getTypeParser: (id: TypeId, format?: TypeFormat) => any
export const arrayParser: (source: string, transform: (entry: any) => any) => any[];

47
node_modules/pg-types/index.js generated vendored Normal file
View File

@ -0,0 +1,47 @@
var textParsers = require('./lib/textParsers');
var binaryParsers = require('./lib/binaryParsers');
var arrayParser = require('./lib/arrayParser');
var builtinTypes = require('./lib/builtins');
exports.getTypeParser = getTypeParser;
exports.setTypeParser = setTypeParser;
exports.arrayParser = arrayParser;
exports.builtins = builtinTypes;
// Parser registries keyed first by wire format ('text' | 'binary'),
// then by type OID.
var typeParsers = {
  text: {},
  binary: {}
};
// Fallback used when no parser is registered for an OID.
// Note it stringifies the value rather than returning it untouched.
function noParse (val) {
  return String(val);
};
// Returns the function used to convert a type (identified by oid) from its
// raw wire representation into a JavaScript value. Falls back to noParse
// for unknown formats or unregistered OIDs.
// note: the oid can be obtained via the following sql query:
// SELECT oid FROM pg_type WHERE typname = 'TYPE_NAME_HERE';
function getTypeParser (oid, format) {
  var parsers = typeParsers[format || 'text'];
  if (!parsers) {
    return noParse;
  }
  return parsers[oid] || noParse;
};
// Register a custom parser for the given oid. The `format` argument
// ('text' | 'binary') may be omitted, defaulting to 'text'; in that case
// the second argument is the parse function itself.
function setTypeParser (oid, format, parseFn) {
  // strict check — `==` was used before, which also matched via coercion
  if (typeof format === 'function') {
    parseFn = format;
    format = 'text';
  }
  typeParsers[format][oid] = parseFn;
};
// Populate the text and binary registries with the built-in parsers.
textParsers.init(function(oid, converter) {
  typeParsers.text[oid] = converter;
});
binaryParsers.init(function(oid, converter) {
  typeParsers.binary[oid] = converter;
});

21
node_modules/pg-types/index.test-d.ts generated vendored Normal file
View File

@ -0,0 +1,21 @@
import * as types from '.';
import { expectType } from 'tsd';
// builtins
expectType<types.TypesBuiltins>(types.builtins);
// getTypeParser
const noParse = types.getTypeParser(types.builtins.NUMERIC, 'text');
const numericParser = types.getTypeParser(types.builtins.NUMERIC, 'binary');
expectType<string>(noParse('noParse'));
expectType<number>(numericParser([200, 1, 0, 15]));
// getArrayParser
const value = types.arrayParser('{1,2,3}', (num) => parseInt(num));
expectType<number[]>(value);
//setTypeParser
types.setTypeParser(types.builtins.INT8, parseInt);
types.setTypeParser(types.builtins.FLOAT8, parseFloat);
types.setTypeParser(types.builtins.FLOAT8, 'binary', (data) => data[0]);
types.setTypeParser(types.builtins.FLOAT8, 'text', parseFloat);

11
node_modules/pg-types/lib/arrayParser.js generated vendored Normal file
View File

@ -0,0 +1,11 @@
var array = require('postgres-array');
module.exports = {
create: function (source, transform) {
return {
parse: function() {
return array.parse(source, transform);
}
};
}
};

257
node_modules/pg-types/lib/binaryParsers.js generated vendored Normal file
View File

@ -0,0 +1,257 @@
var parseInt64 = require('pg-int8');
// Read `bits` bits from Buffer `data`, starting `offset` bits in, MSB-first.
// `invert` complements each byte before use (used for two's-complement
// negatives); `callback(lastValue, newValue, bits)` folds successive chunks
// into the accumulator (default: plain big-endian integer accumulation).
var parseBits = function(data, bits, offset, invert, callback) {
  offset = offset || 0;
  invert = invert || false;
  callback = callback || function(lastValue, newValue, bits) { return (lastValue * Math.pow(2, bits)) + newValue; };
  var offsetBytes = offset >> 3;
  var inv = function(value) {
    if (invert) {
      return ~value & 0xff;
    }
    return value;
  };
  // read first (maybe partial) byte
  var mask = 0xff;
  var firstBits = 8 - (offset % 8);
  if (bits < firstBits) {
    // the requested bits fit inside one byte: mask down to just those bits
    mask = (0xff << (8 - bits)) & 0xff;
    firstBits = bits;
  }
  if (offset) {
    mask = mask >> (offset % 8);
  }
  var result = 0;
  if ((offset % 8) + bits >= 8) {
    result = callback(0, inv(data[offsetBytes]) & mask, firstBits);
  }
  // read whole bytes
  var bytes = (bits + offset) >> 3;
  for (var i = offsetBytes + 1; i < bytes; i++) {
    result = callback(result, inv(data[i]), 8);
  }
  // trailing bits that do not fill a complete byte
  var lastBits = (bits + offset) % 8;
  if (lastBits > 0) {
    result = callback(result, inv(data[bytes]) >> (8 - lastBits), lastBits);
  }
  return result;
};
// Decode an IEEE 754 float from the raw bytes: 1 sign bit, `exponentBits`
// exponent bits, `precisionBits` mantissa bits (23/8 for float4, 52/11
// for float8).
var parseFloatFromBits = function(data, precisionBits, exponentBits) {
  var bias = Math.pow(2, exponentBits - 1) - 1;
  var sign = parseBits(data, 1);
  var exponent = parseBits(data, exponentBits, 1);

  // zero (denormals are flushed to zero here, matching prior behavior)
  if (exponent === 0) {
    return 0;
  }

  // parse mantissa: fold in the implicit leading 1, then add each set bit's
  // negative power of two. The result is therefore always >= 1.
  var precisionBitsCounter = 1;
  var parsePrecisionBits = function(lastValue, newValue, bits) {
    if (lastValue === 0) {
      lastValue = 1;
    }
    for (var i = 1; i <= bits; i++) {
      precisionBitsCounter /= 2;
      if ((newValue & (0x1 << (bits - i))) > 0) {
        lastValue += precisionBitsCounter;
      }
    }
    return lastValue;
  };
  var mantissa = parseBits(data, precisionBits, exponentBits + 1, false, parsePrecisionBits);

  // special cases: an all-ones exponent field encodes Infinity/NaN. The
  // field is `exponentBits` wide, so all-ones is 2^exponentBits - 1
  // (255 for float4, 2047 for float8). The previous check compared against
  // 2^(exponentBits+1) - 1 and tested mantissa === 0 (impossible given the
  // implicit leading 1 above), so Infinity/NaN were never detected.
  if (exponent === (Math.pow(2, exponentBits) - 1)) {
    if (mantissa === 1) {
      // mantissa bits all zero -> Infinity with the given sign
      return (sign === 0) ? Infinity : -Infinity;
    }
    return NaN;
  }

  // normal number
  return ((sign === 0) ? 1 : -1) * Math.pow(2, exponent - bias) * mantissa;
};
// int2: sign bit, then 15 magnitude bits; negatives are recovered from the
// two's complement via the inverted read plus one.
var parseInt16 = function(value) {
  if (parseBits(value, 1) == 1) {
    return -1 * (parseBits(value, 15, 1, true) + 1);
  }
  return parseBits(value, 15, 1);
};
// int4: same scheme with 31 magnitude bits.
var parseInt32 = function(value) {
  if (parseBits(value, 1) == 1) {
    return -1 * (parseBits(value, 31, 1, true) + 1);
  }
  return parseBits(value, 31, 1);
};
// float4: IEEE 754 single precision (23 mantissa bits, 8 exponent bits).
var parseFloat32 = function(value) {
  return parseFloatFromBits(value, 23, 8);
};
// float8: IEEE 754 double precision (52 mantissa bits, 11 exponent bits).
var parseFloat64 = function(value) {
  return parseFloatFromBits(value, 52, 11);
};
// numeric: Int16 header fields (ndigits at bit 0, weight at 16, sign at 32,
// dscale at 48) followed by base-10000 digit groups. sign 0xc000 is NaN.
var parseNumeric = function(value) {
  var sign = parseBits(value, 16, 32);
  if (sign == 0xc000) {
    return NaN;
  }
  // weight is the base-10000 exponent of the first digit group
  var weight = Math.pow(10000, parseBits(value, 16, 16));
  var result = 0;
  var digits = [];
  var ndigits = parseBits(value, 16);
  for (var i = 0; i < ndigits; i++) {
    result += parseBits(value, 16, 64 + (16 * i)) * weight;
    weight /= 10000;
  }
  // round to the declared display scale (dscale)
  var scale = Math.pow(10, parseBits(value, 16, 48));
  return ((sign === 0) ? 1 : -1) * Math.round(result * scale) / scale;
};
// timestamp/timestamptz: a signed Int64 read as sign bit + 63 magnitude
// bits. Returns a Date augmented with usec/getMicroSeconds helpers for
// sub-millisecond precision.
var parseDate = function(isUTC, value) {
  var sign = parseBits(value, 1);
  var rawValue = parseBits(value, 63, 1);
  // discard usecs and shift from 2000 to 1970
  var result = new Date((((sign === 0) ? 1 : -1) * rawValue / 1000) + 946684800000);
  if (!isUTC) {
    // NOTE(review): reinterprets the value as local wall-clock time by
    // adding the current zone offset — confirm behavior around DST edges.
    result.setTime(result.getTime() + result.getTimezoneOffset() * 60000);
  }
  // add microseconds to the date
  result.usec = rawValue % 1000;
  result.getMicroSeconds = function() {
    return this.usec;
  };
  result.setMicroSeconds = function(value) {
    this.usec = value;
  };
  result.getUTCMicroSeconds = function() {
    return this.usec;
  };
  return result;
};
// Binary array format: Int32 ndim, Int32 flags, Int32 element OID, then one
// {size, lower-bound} pair per dimension, then length-prefixed elements.
// Only int4 (0x17), int8 (0x14) and text (0x19) elements are handled.
var parseArray = function(value) {
  var dim = parseBits(value, 32);
  var flags = parseBits(value, 32, 32);
  var elementType = parseBits(value, 32, 64);
  var offset = 96;
  var dims = [];
  for (var i = 0; i < dim; i++) {
    // parse dimension
    dims[i] = parseBits(value, 32, offset);
    offset += 32;
    // ignore lower bounds
    offset += 32;
  }
  var parseElement = function(elementType) {
    // parse content length
    var length = parseBits(value, 32, offset);
    offset += 32;
    // a length of -1 (0xffffffff) marks a NULL element
    if (length == 0xffffffff) {
      return null;
    }
    var result;
    if ((elementType == 0x17) || (elementType == 0x14)) {
      // int/bigint
      result = parseBits(value, length * 8, offset);
      offset += length * 8;
      return result;
    }
    else if (elementType == 0x19) {
      // string
      // NOTE(review): `this.encoding` is likely undefined here, so
      // toString falls back to 'utf8' — confirm before relying on it.
      result = value.toString(this.encoding, offset >> 3, (offset += (length << 3)) >> 3);
      return result;
    }
    else {
      console.log("ERROR: ElementType not implemented: " + elementType);
    }
  };
  // Recursively build the (possibly multi-dimensional) JS array, consuming
  // elements in row-major order.
  var parse = function(dimension, elementType) {
    var array = [];
    var i;
    if (dimension.length > 1) {
      var count = dimension.shift();
      for (i = 0; i < count; i++) {
        array[i] = parse(dimension, elementType);
      }
      dimension.unshift(count);
    }
    else {
      for (i = 0; i < dimension[0]; i++) {
        array[i] = parseElement(elementType);
      }
    }
    return array;
  };
  return parse(dims, elementType);
};
// text: decode the whole buffer as UTF-8.
var parseText = function(value) {
  return value.toString('utf8');
};
// bool: any non-zero byte is true.
var parseBool = function(value) {
  if(value === null) return null;
  return (parseBits(value, 8) > 0);
};
// Register a binary-format parser for each supported type OID.
var init = function(register) {
  register(20, parseInt64); // int8
  register(21, parseInt16); // int2
  register(23, parseInt32); // int4
  register(26, parseInt32); // oid
  register(1700, parseNumeric);
  register(700, parseFloat32); // float4
  register(701, parseFloat64); // float8
  register(16, parseBool);
  register(1114, parseDate.bind(null, false)); // timestamp without time zone
  register(1184, parseDate.bind(null, true)); // timestamptz
  register(1000, parseArray); // bool[]
  register(1007, parseArray); // int4[]
  register(1016, parseArray); // int8[]
  register(1008, parseArray);
  register(1009, parseArray); // text[]
  register(25, parseText);
};
module.exports = {
  init: init
};

73
node_modules/pg-types/lib/builtins.js generated vendored Normal file
View File

@ -0,0 +1,73 @@
/**
* Following query was used to generate this file:
SELECT json_object_agg(UPPER(PT.typname), PT.oid::int4 ORDER BY pt.oid)
FROM pg_type PT
WHERE typnamespace = (SELECT pgn.oid FROM pg_namespace pgn WHERE nspname = 'pg_catalog') -- Take only builting Postgres types with stable OID (extension types are not guaranted to be stable)
AND typtype = 'b' -- Only basic types
AND typelem = 0 -- Ignore aliases
AND typisdefined -- Ignore undefined types
*/
module.exports = {
BOOL: 16,
BYTEA: 17,
CHAR: 18,
INT8: 20,
INT2: 21,
INT4: 23,
REGPROC: 24,
TEXT: 25,
OID: 26,
TID: 27,
XID: 28,
CID: 29,
JSON: 114,
XML: 142,
PG_NODE_TREE: 194,
SMGR: 210,
PATH: 602,
POLYGON: 604,
CIDR: 650,
FLOAT4: 700,
FLOAT8: 701,
ABSTIME: 702,
RELTIME: 703,
TINTERVAL: 704,
CIRCLE: 718,
MACADDR8: 774,
MONEY: 790,
MACADDR: 829,
INET: 869,
ACLITEM: 1033,
BPCHAR: 1042,
VARCHAR: 1043,
DATE: 1082,
TIME: 1083,
TIMESTAMP: 1114,
TIMESTAMPTZ: 1184,
INTERVAL: 1186,
TIMETZ: 1266,
BIT: 1560,
VARBIT: 1562,
NUMERIC: 1700,
REFCURSOR: 1790,
REGPROCEDURE: 2202,
REGOPER: 2203,
REGOPERATOR: 2204,
REGCLASS: 2205,
REGTYPE: 2206,
UUID: 2950,
TXID_SNAPSHOT: 2970,
PG_LSN: 3220,
PG_NDISTINCT: 3361,
PG_DEPENDENCIES: 3402,
TSVECTOR: 3614,
TSQUERY: 3615,
GTSVECTOR: 3642,
REGCONFIG: 3734,
REGDICTIONARY: 3769,
JSONB: 3802,
REGNAMESPACE: 4089,
REGROLE: 4096
};

215
node_modules/pg-types/lib/textParsers.js generated vendored Normal file
View File

@ -0,0 +1,215 @@
var array = require('postgres-array')
var arrayParser = require('./arrayParser');
var parseDate = require('postgres-date');
var parseInterval = require('postgres-interval');
var parseByteA = require('postgres-bytea');
// Wrap a parser so SQL NULL (seen here as JS null) passes through
// untouched instead of being handed to the parser.
function allowNull (fn) {
  return function nullAllowed (value) {
    return value === null ? value : fn(value)
  }
}
// Parse a postgres boolean literal; null passes through, anything not in
// the truthy set below is false.
function parseBool (value) {
  if (value === null) return value
  switch (value) {
    case 'TRUE':
    case 't':
    case 'true':
    case 'y':
    case 'yes':
    case 'on':
    case '1':
      return true
    default:
      return false
  }
}
// bool[]: each element through parseBool; empty/NULL input yields null.
function parseBoolArray (value) {
  if (!value) return null
  return array.parse(value, parseBool)
}
// parseInt with an explicit base-10 radix.
function parseBaseTenInt (string) {
  return parseInt(string, 10)
}
// int2[]/int4[]/oid[]: integer arrays, NULL elements preserved.
function parseIntegerArray (value) {
  if (!value) return null
  return array.parse(value, allowNull(parseBaseTenInt))
}
// int8[]: elements stay strings (they may exceed Number.MAX_SAFE_INTEGER).
function parseBigIntegerArray (value) {
  if (!value) return null
  return array.parse(value, allowNull(function (entry) {
    return parseBigInteger(entry).trim()
  }))
}
// point[]: each element through parsePoint, NULL elements preserved.
var parsePointArray = function(value) {
  if(!value) { return null; }
  var p = arrayParser.create(value, function(entry) {
    if(entry !== null) {
      entry = parsePoint(entry);
    }
    return entry;
  });
  return p.parse();
};
// float4[]/float8[]/numeric[]: each element through parseFloat.
var parseFloatArray = function(value) {
  if(!value) { return null; }
  var p = arrayParser.create(value, function(entry) {
    if(entry !== null) {
      entry = parseFloat(entry);
    }
    return entry;
  });
  return p.parse();
};
// Generic string arrays (varchar[], uuid[], inet[], …): no element transform.
var parseStringArray = function(value) {
  if(!value) { return null; }
  var p = arrayParser.create(value);
  return p.parse();
};
// date/timestamp arrays: each element through postgres-date.
var parseDateArray = function(value) {
  if (!value) { return null; }
  var p = arrayParser.create(value, function(entry) {
    if (entry !== null) {
      entry = parseDate(entry);
    }
    return entry;
  });
  return p.parse();
};
// interval[]: each element through postgres-interval.
var parseIntervalArray = function(value) {
  if (!value) { return null; }
  var p = arrayParser.create(value, function(entry) {
    if (entry !== null) {
      entry = parseInterval(entry);
    }
    return entry;
  });
  return p.parse();
};
// bytea[]: each element through postgres-bytea, NULLs preserved.
var parseByteAArray = function(value) {
  if (!value) { return null; }
  return array.parse(value, allowNull(parseByteA));
};
// int2/int4/oid arrive as decimal text; convert to a JS number.
var parseInteger = function(value) {
  return parseInt(value, 10);
};
// int8 can exceed Number.MAX_SAFE_INTEGER, so keep it as a string.
// Input that is not all digits (negative or malformed) is returned as-is.
var parseBigInteger = function(value) {
  var valStr = String(value);
  return /^\d+$/.test(valStr) ? valStr : value;
};
// json[]/jsonb[]: each element through JSON.parse, NULL elements preserved.
var parseJsonArray = function(value) {
  if (!value) { return null; }
  return array.parse(value, allowNull(JSON.parse));
};
// point: "(x,y)" -> { x, y }; input not starting with '(' yields null.
var parsePoint = function(value) {
  if (value[0] !== '(') { return null; }
  var coords = value.substring(1, value.length - 1).split(',');
  return {
    x: parseFloat(coords[0]),
    y: parseFloat(coords[1])
  };
};
// circle: "<(x,y),r>" -> { x, y, radius }.
// NOTE(review): the guard uses && — it only rejects input where BOTH the
// first char isn't '<' AND the second isn't '(' ; '||' looks intended.
// Left as-is to preserve behavior for malformed input.
var parseCircle = function(value) {
  if (value[0] !== '<' && value[1] !== '(') { return null; }
  // scan between the outer '<' and '>', splitting "(x,y)" from the radius
  var point = '(';
  var radius = '';
  var pointParsed = false;
  for (var i = 2; i < value.length - 1; i++){
    if (!pointParsed) {
      point += value[i];
    }
    if (value[i] === ')') {
      pointParsed = true;
      continue;
    } else if (!pointParsed) {
      continue;
    }
    // skip the comma separating the point from the radius
    if (value[i] === ','){
      continue;
    }
    radius += value[i];
  }
  var result = parsePoint(point);
  result.radius = parseFloat(radius);
  return result;
};
// Register a text-format parser for each supported type OID; unregistered
// OIDs fall through to the caller's default (raw string).
var init = function(register) {
  register(20, parseBigInteger); // int8
  register(21, parseInteger); // int2
  register(23, parseInteger); // int4
  register(26, parseInteger); // oid
  register(700, parseFloat); // float4/real
  register(701, parseFloat); // float8/double
  register(16, parseBool);
  register(1082, parseDate); // date
  register(1114, parseDate); // timestamp without time zone
  register(1184, parseDate); // timestamptz
  register(600, parsePoint); // point
  register(651, parseStringArray); // cidr[]
  register(718, parseCircle); // circle
  register(1000, parseBoolArray);
  register(1001, parseByteAArray);
  register(1005, parseIntegerArray); // _int2
  register(1007, parseIntegerArray); // _int4
  register(1028, parseIntegerArray); // oid[]
  register(1016, parseBigIntegerArray); // _int8
  register(1017, parsePointArray); // point[]
  register(1021, parseFloatArray); // _float4
  register(1022, parseFloatArray); // _float8
  register(1231, parseFloatArray); // _numeric
  register(1014, parseStringArray); //char
  register(1015, parseStringArray); //varchar
  register(1008, parseStringArray);
  register(1009, parseStringArray);
  register(1040, parseStringArray); // macaddr[]
  register(1041, parseStringArray); // inet[]
  register(1115, parseDateArray); // timestamp without time zone[]
  register(1182, parseDateArray); // _date
  register(1185, parseDateArray); // timestamp with time zone[]
  register(1186, parseInterval);
  register(1187, parseIntervalArray);
  register(17, parseByteA); // bytea
  register(114, JSON.parse.bind(JSON)); // json
  register(3802, JSON.parse.bind(JSON)); // jsonb
  register(199, parseJsonArray); // json[]
  register(3807, parseJsonArray); // jsonb[]
  register(3907, parseStringArray); // numrange[]
  register(2951, parseStringArray); // uuid[]
  register(791, parseStringArray); // money[]
  register(1183, parseStringArray); // time[]
  register(1270, parseStringArray); // timetz[]
};
module.exports = {
  init: init
};

42
node_modules/pg-types/package.json generated vendored Normal file
View File

@ -0,0 +1,42 @@
{
"name": "pg-types",
"version": "2.2.0",
"description": "Query result type converters for node-postgres",
"main": "index.js",
"scripts": {
"test": "tape test/*.js | tap-spec && npm run test-ts",
"test-ts": "if-node-version '>= 8' tsd"
},
"repository": {
"type": "git",
"url": "git://github.com/brianc/node-pg-types.git"
},
"keywords": [
"postgres",
"PostgreSQL",
"pg"
],
"author": "Brian M. Carlson",
"license": "MIT",
"bugs": {
"url": "https://github.com/brianc/node-pg-types/issues"
},
"homepage": "https://github.com/brianc/node-pg-types",
"devDependencies": {
"if-node-version": "^1.1.1",
"pff": "^1.0.0",
"tap-spec": "^4.0.0",
"tape": "^4.0.0",
"tsd": "^0.7.4"
},
"dependencies": {
"pg-int8": "1.0.1",
"postgres-array": "~2.0.0",
"postgres-bytea": "~1.0.0",
"postgres-date": "~1.0.4",
"postgres-interval": "^1.1.0"
},
"engines": {
"node": ">=4"
}
}

24
node_modules/pg-types/test/index.js generated vendored Normal file
View File

@ -0,0 +1,24 @@
var test = require('tape')
var printf = require('pff')
var getTypeParser = require('../').getTypeParser
var types = require('./types')
// Drive every fixture in ./types through its registered parser and compare
// the result against the expected value.
test('types', function (t) {
  Object.keys(types).forEach(function (typeName) {
    var type = types[typeName]
    t.test(typeName, function (t) {
      var parser = getTypeParser(type.id, type.format)
      type.tests.forEach(function (tests) {
        var input = tests[0]
        var expected = tests[1]
        var result = parser(input)
        // an expectation may be a callback that makes its own assertions
        if (typeof expected === 'function') {
          return expected(t, result)
        }
        t.equal(result, expected)
      })
      t.end()
    })
  })
})

597
node_modules/pg-types/test/types.js generated vendored Normal file
View File

@ -0,0 +1,597 @@
'use strict'
exports['string/varchar'] = {
format: 'text',
id: 1043,
tests: [
['bang', 'bang']
]
}
exports['integer/int4'] = {
format: 'text',
id: 23,
tests: [
['2147483647', 2147483647]
]
}
exports['smallint/int2'] = {
format: 'text',
id: 21,
tests: [
['32767', 32767]
]
}
exports['bigint/int8'] = {
format: 'text',
id: 20,
tests: [
['9223372036854775807', '9223372036854775807']
]
}
exports.oid = {
format: 'text',
id: 26,
tests: [
['103', 103]
]
}
var bignum = '31415926535897932384626433832795028841971693993751058.16180339887498948482045868343656381177203091798057628'
exports.numeric = {
format: 'text',
id: 1700,
tests: [
[bignum, bignum]
]
}
exports['real/float4'] = {
format: 'text',
id: 700,
tests: [
['123.456', 123.456]
]
}
exports['double precision / float 8'] = {
format: 'text',
id: 701,
tests: [
['12345678.12345678', 12345678.12345678]
]
}
exports.boolean = {
format: 'text',
id: 16,
tests: [
['TRUE', true],
['t', true],
['true', true],
['y', true],
['yes', true],
['on', true],
['1', true],
['f', false],
[null, null]
]
}
exports.timestamptz = {
format: 'text',
id: 1184,
tests: [
[
'2010-10-31 14:54:13.74-05:30',
dateEquals(2010, 9, 31, 20, 24, 13, 740)
],
[
'2011-01-23 22:05:00.68-06',
dateEquals(2011, 0, 24, 4, 5, 0, 680)
],
[
'2010-10-30 14:11:12.730838Z',
dateEquals(2010, 9, 30, 14, 11, 12, 730)
],
[
'2010-10-30 13:10:01+05',
dateEquals(2010, 9, 30, 8, 10, 1, 0)
]
]
}
exports.timestamp = {
format: 'text',
id: 1114,
tests: [
[
'2010-10-31 00:00:00',
function (t, value) {
t.equal(
value.toUTCString(),
new Date(2010, 9, 31, 0, 0, 0, 0, 0).toUTCString()
)
t.equal(
value.toString(),
new Date(2010, 9, 31, 0, 0, 0, 0, 0, 0).toString()
)
}
]
]
}
exports.date = {
format: 'text',
id: 1082,
tests: [
['2010-10-31', function (t, value) {
var now = new Date(2010, 9, 31)
dateEquals(
2010,
now.getUTCMonth(),
now.getUTCDate(),
now.getUTCHours(), 0, 0, 0)(t, value)
t.equal(value.getHours(), now.getHours())
}]
]
}
exports.inet = {
format: 'text',
id: 869,
tests: [
['8.8.8.8', '8.8.8.8'],
['2001:4860:4860::8888', '2001:4860:4860::8888'],
['127.0.0.1', '127.0.0.1'],
['fd00:1::40e', 'fd00:1::40e'],
['1.2.3.4', '1.2.3.4']
]
}
exports.cidr = {
format: 'text',
id: 650,
tests: [
['172.16.0.0/12', '172.16.0.0/12'],
['fe80::/10', 'fe80::/10'],
['fc00::/7', 'fc00::/7'],
['192.168.0.0/24', '192.168.0.0/24'],
['10.0.0.0/8', '10.0.0.0/8']
]
}
exports.macaddr = {
format: 'text',
id: 829,
tests: [
['08:00:2b:01:02:03', '08:00:2b:01:02:03'],
['16:10:9f:0d:66:00', '16:10:9f:0d:66:00']
]
}
exports.numrange = {
format: 'text',
id: 3906,
tests: [
['[,]', '[,]'],
['(,)', '(,)'],
['(,]', '(,]'],
['[1,)', '[1,)'],
['[,1]', '[,1]'],
['(1,2)', '(1,2)'],
['(1,20.5]', '(1,20.5]']
]
}
exports.interval = {
format: 'text',
id: 1186,
tests: [
['01:02:03', function (t, value) {
t.equal(value.toPostgres(), '3 seconds 2 minutes 1 hours')
t.deepEqual(value, {hours: 1, minutes: 2, seconds: 3})
}],
['01:02:03.456', function (t, value) {
t.deepEqual(value, {hours: 1, minutes:2, seconds: 3, milliseconds: 456})
}],
['1 year -32 days', function (t, value) {
t.equal(value.toPostgres(), '-32 days 1 years')
t.deepEqual(value, {years: 1, days: -32})
}],
['1 day -00:00:03', function (t, value) {
t.equal(value.toPostgres(), '-3 seconds 1 days')
t.deepEqual(value, {days: 1, seconds: -3})
}]
]
}
exports.bytea = {
format: 'text',
id: 17,
tests: [
['foo\\000\\200\\\\\\377', function (t, value) {
var buffer = new Buffer([102, 111, 111, 0, 128, 92, 255])
t.ok(buffer.equals(value))
}],
['', function (t, value) {
var buffer = new Buffer(0)
t.ok(buffer.equals(value))
}]
]
}
exports['array/boolean'] = {
format: 'text',
id: 1000,
tests: [
['{true,false}', function (t, value) {
t.deepEqual(value, [true, false])
}]
]
}
exports['array/char'] = {
format: 'text',
id: 1014,
tests: [
['{foo,bar}', function (t, value) {
t.deepEqual(value, ['foo', 'bar'])
}]
]
}
exports['array/varchar'] = {
format: 'text',
id: 1015,
tests: [
['{foo,bar}', function (t, value) {
t.deepEqual(value, ['foo', 'bar'])
}]
]
}
exports['array/text'] = {
format: 'text',
id: 1008,
tests: [
['{foo}', function (t, value) {
t.deepEqual(value, ['foo'])
}]
]
}
// bytea[] (oid 1001): hex-format elements decode to Buffers, NULL stays null.
// new Buffer(string, encoding) is deprecated (DEP0005); use Buffer.from.
exports['array/bytea'] = {
  format: 'text',
  id: 1001,
  tests: [
    ['{"\\\\x00000000"}', function (t, value) {
      var buffer = Buffer.from('00000000', 'hex')
      t.ok(Array.isArray(value))
      t.equal(value.length, 1)
      t.ok(buffer.equals(value[0]))
    }],
    ['{NULL,"\\\\x4e554c4c"}', function (t, value) {
      var buffer = Buffer.from('4e554c4c', 'hex')
      t.ok(Array.isArray(value))
      t.equal(value.length, 2)
      t.equal(value[0], null)
      t.ok(buffer.equals(value[1]))
    }],
  ]
}
exports['array/numeric'] = {
format: 'text',
id: 1231,
tests: [
['{1.2,3.4}', function (t, value) {
t.deepEqual(value, [1.2, 3.4])
}]
]
}
exports['array/int2'] = {
format: 'text',
id: 1005,
tests: [
['{-32768, -32767, 32766, 32767}', function (t, value) {
t.deepEqual(value, [-32768, -32767, 32766, 32767])
}]
]
}
// int4[] uses oid 1007. The previous value, 1005, is int2[] — already covered
// by the fixture above — so this fixture never actually exercised the int4
// array parser.
exports['array/int4'] = {
  format: 'text',
  id: 1007,
  tests: [
    ['{-2147483648, -2147483647, 2147483646, 2147483647}', function (t, value) {
      t.deepEqual(value, [-2147483648, -2147483647, 2147483646, 2147483647])
    }]
  ]
}
exports['array/int8'] = {
format: 'text',
id: 1016,
tests: [
[
'{-9223372036854775808, -9223372036854775807, 9223372036854775806, 9223372036854775807}',
function (t, value) {
t.deepEqual(value, [
'-9223372036854775808',
'-9223372036854775807',
'9223372036854775806',
'9223372036854775807'
])
}
]
]
}
exports['array/json'] = {
format: 'text',
id: 199,
tests: [
[
'{{1,2},{[3],"[4,5]"},{null,NULL}}',
function (t, value) {
t.deepEqual(value, [
[1, 2],
[[3], [4, 5]],
[null, null],
])
}
]
]
}
exports['array/jsonb'] = {
format: 'text',
id: 3807,
tests: exports['array/json'].tests
}
exports['array/point'] = {
format: 'text',
id: 1017,
tests: [
['{"(25.1,50.5)","(10.1,40)"}', function (t, value) {
t.deepEqual(value, [{x: 25.1, y: 50.5}, {x: 10.1, y: 40}])
}]
]
}
exports['array/oid'] = {
format: 'text',
id: 1028,
tests: [
['{25864,25860}', function (t, value) {
t.deepEqual(value, [25864, 25860])
}]
]
}
exports['array/float4'] = {
format: 'text',
id: 1021,
tests: [
['{1.2, 3.4}', function (t, value) {
t.deepEqual(value, [1.2, 3.4])
}]
]
}
exports['array/float8'] = {
format: 'text',
id: 1022,
tests: [
['{-12345678.1234567, 12345678.12345678}', function (t, value) {
t.deepEqual(value, [-12345678.1234567, 12345678.12345678])
}]
]
}
exports['array/date'] = {
format: 'text',
id: 1182,
tests: [
['{2014-01-01,2015-12-31}', function (t, value) {
var expecteds = [new Date(2014, 0, 1), new Date(2015, 11, 31)]
t.equal(value.length, 2)
value.forEach(function (date, index) {
var expected = expecteds[index]
dateEquals(
expected.getUTCFullYear(),
expected.getUTCMonth(),
expected.getUTCDate(),
expected.getUTCHours(), 0, 0, 0)(t, date)
})
}]
]
}
exports['array/interval'] = {
format: 'text',
id: 1187,
tests: [
['{01:02:03,1 day -00:00:03}', function (t, value) {
var expecteds = [{hours: 1, minutes: 2, seconds: 3},
{days: 1, seconds: -3}]
t.equal(value.length, 2)
t.deepEqual(value, expecteds);
}]
]
}
exports['array/inet'] = {
format: 'text',
id: 1041,
tests: [
['{8.8.8.8}', function (t, value) {
t.deepEqual(value, ['8.8.8.8']);
}],
['{2001:4860:4860::8888}', function (t, value) {
t.deepEqual(value, ['2001:4860:4860::8888']);
}],
['{127.0.0.1,fd00:1::40e,1.2.3.4}', function (t, value) {
t.deepEqual(value, ['127.0.0.1', 'fd00:1::40e', '1.2.3.4']);
}]
]
}
exports['array/cidr'] = {
format: 'text',
id: 651,
tests: [
['{172.16.0.0/12}', function (t, value) {
t.deepEqual(value, ['172.16.0.0/12']);
}],
['{fe80::/10}', function (t, value) {
t.deepEqual(value, ['fe80::/10']);
}],
['{10.0.0.0/8,fc00::/7,192.168.0.0/24}', function (t, value) {
t.deepEqual(value, ['10.0.0.0/8', 'fc00::/7', '192.168.0.0/24']);
}]
]
}
exports['array/macaddr'] = {
format: 'text',
id: 1040,
tests: [
['{08:00:2b:01:02:03,16:10:9f:0d:66:00}', function (t, value) {
t.deepEqual(value, ['08:00:2b:01:02:03', '16:10:9f:0d:66:00']);
}]
]
}
exports['array/numrange'] = {
format: 'text',
id: 3907,
tests: [
['{"[1,2]","(4.5,8)","[10,40)","(-21.2,60.3]"}', function (t, value) {
t.deepEqual(value, ['[1,2]', '(4.5,8)', '[10,40)', '(-21.2,60.3]']);
}],
['{"[,20]","[3,]","[,]","(,35)","(1,)","(,)"}', function (t, value) {
t.deepEqual(value, ['[,20]', '[3,]', '[,]', '(,35)', '(1,)', '(,)']);
}],
['{"[,20)","[3,)","[,)","[,35)","[1,)","[,)"}', function (t, value) {
t.deepEqual(value, ['[,20)', '[3,)', '[,)', '[,35)', '[1,)', '[,)']);
}]
]
}
exports['binary-string/varchar'] = {
format: 'binary',
id: 1043,
tests: [
['bang', 'bang']
]
}
exports['binary-integer/int4'] = {
format: 'binary',
id: 23,
tests: [
[[0, 0, 0, 100], 100]
]
}
exports['binary-smallint/int2'] = {
format: 'binary',
id: 21,
tests: [
[[0, 101], 101]
]
}
// int8 (oid 20) in binary format: 8 big-endian bytes. The parsed value stays
// a string because int8 can exceed Number.MAX_SAFE_INTEGER.
// new Buffer([...]) is deprecated (DEP0005); use Buffer.from.
exports['binary-bigint/int8'] = {
  format: 'binary',
  id: 20,
  tests: [
    [Buffer.from([0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]), '9223372036854775807']
  ]
}
exports['binary-oid'] = {
format: 'binary',
id: 26,
tests: [
[[0, 0, 0, 103], 103]
]
}
exports['binary-numeric'] = {
format: 'binary',
id: 1700,
tests: [
[
[0, 2, 0, 0, 0, 0, 0, hex('0x64'), 0, 12, hex('0xd'), hex('0x48'), 0, 0, 0, 0],
12.34
]
]
}
exports['binary-real/float4'] = {
format: 'binary',
id: 700,
tests: [
[['0x41', '0x48', '0x00', '0x00'].map(hex), 12.5]
]
}
exports['binary-boolean'] = {
format: 'binary',
id: 16,
tests: [
[[1], true],
[[0], false],
[null, null]
]
}
// text (oid 25) in binary format: raw UTF-8 bytes decode to a string
// ("sladda"). new Buffer([...]) is deprecated (DEP0005); use Buffer.from.
exports['binary-string'] = {
  format: 'binary',
  id: 25,
  tests: [
    [
      Buffer.from(['0x73', '0x6c', '0x61', '0x64', '0x64', '0x61'].map(hex)),
      'sladda'
    ]
  ]
}
exports.point = {
format: 'text',
id: 600,
tests: [
['(25.1,50.5)', function (t, value) {
t.deepEqual(value, {x: 25.1, y: 50.5})
}]
]
}
exports.circle = {
format: 'text',
id: 718,
tests: [
['<(25,10),5>', function (t, value) {
t.deepEqual(value, {x: 25, y: 10, radius: 5})
}]
]
}
// Convert a hexadecimal string (with or without a leading "0x") to a number.
function hex (text) {
  return Number.parseInt(text, 16)
}
// Build a tape assertion that checks a Date value against the UTC instant
// described by the given (year, monthIndex, day, hour, ...) components.
// Comparison is done on the RFC-1123 string form so it is timezone-stable.
function dateEquals () {
  var expected = new Date(Date.UTC.apply(Date, arguments))
  return function (t, value) {
    t.equal(value.toUTCString(), expected.toUTCString())
  }
}

21
node_modules/pg/LICENSE generated vendored Normal file
View File

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2010 - 2021 Brian Carlson
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

95
node_modules/pg/README.md generated vendored Normal file
View File

@ -0,0 +1,95 @@
# node-postgres
[![Build Status](https://secure.travis-ci.org/brianc/node-postgres.svg?branch=master)](http://travis-ci.org/brianc/node-postgres)
<span class="badge-npmversion"><a href="https://npmjs.org/package/pg" title="View this project on NPM"><img src="https://img.shields.io/npm/v/pg.svg" alt="NPM version" /></a></span>
<span class="badge-npmdownloads"><a href="https://npmjs.org/package/pg" title="View this project on NPM"><img src="https://img.shields.io/npm/dm/pg.svg" alt="NPM downloads" /></a></span>
Non-blocking PostgreSQL client for Node.js. Pure JavaScript and optional native libpq bindings.
## Install
```sh
$ npm install pg
```
---
## :star: [Documentation](https://node-postgres.com) :star:
### Features
- Pure JavaScript client and native libpq bindings share _the same API_
- Connection pooling
- Extensible JS ↔ PostgreSQL data-type coercion
- Supported PostgreSQL features
- Parameterized queries
- Named statements with query plan caching
- Async notifications with `LISTEN/NOTIFY`
- Bulk import & export with `COPY TO/COPY FROM`
### Extras
node-postgres is by design pretty light on abstractions. These are some handy modules we've been using over the years to complete the picture.
The entire list can be found on our [wiki](https://github.com/brianc/node-postgres/wiki/Extras).
## Support
node-postgres is free software. If you encounter a bug with the library please open an issue on the [GitHub repo](https://github.com/brianc/node-postgres). If you have questions unanswered by the documentation please open an issue pointing out how the documentation was unclear & I will do my best to make it better!
When you open an issue please provide:
- version of Node
- version of Postgres
- smallest possible snippet of code to reproduce the problem
You can also follow me [@briancarlson](https://twitter.com/briancarlson) if that's your thing. I try to always announce noteworthy changes & developments with node-postgres on Twitter.
## Sponsorship :two_hearts:
node-postgres's continued development has been made possible in part by generous financial support from [the community](https://github.com/brianc/node-postgres/blob/master/SPONSORS.md).
If you or your company are benefiting from node-postgres and would like to help keep the project financially sustainable [please consider supporting](https://github.com/sponsors/brianc) its development.
### Featured sponsor
Special thanks to [medplum](https://medplum.com) for their generous and thoughtful support of node-postgres!
![medplum](https://raw.githubusercontent.com/medplum/medplum-logo/refs/heads/main/medplum-logo.png)
## Contributing
**:heart: contributions!**
I will **happily** accept your pull request if it:
- **has tests**
- looks reasonable
- does not break backwards compatibility
If your change involves breaking backwards compatibility please point that out in the pull request & we can discuss & plan when and how to release it and what type of documentation or communication it will require.
## Troubleshooting and FAQ
The causes and solutions to common errors can be found among the [Frequently Asked Questions (FAQ)](https://github.com/brianc/node-postgres/wiki/FAQ)
## License
Copyright (c) 2010-2020 Brian Carlson (brian.m.carlson@gmail.com)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

20
node_modules/pg/esm/index.mjs generated vendored Normal file
View File

@ -0,0 +1,20 @@
// ESM wrapper for pg: re-exposes the CommonJS entry point (../lib/index.js)
// as named ES-module exports so `import { Pool } from 'pg'` works.
import pg from '../lib/index.js'

// Re-export all the properties of the CJS namespace as named exports.
export const Client = pg.Client
export const Pool = pg.Pool
export const Connection = pg.Connection
export const types = pg.types
export const Query = pg.Query
export const DatabaseError = pg.DatabaseError
export const escapeIdentifier = pg.escapeIdentifier
export const escapeLiteral = pg.escapeLiteral
export const Result = pg.Result
export const TypeOverrides = pg.TypeOverrides

// Also export the defaults object (shared, mutable configuration defaults).
export const defaults = pg.defaults

// Re-export the whole namespace as the default export, mirroring
// `require('pg')`.
export default pg

650
node_modules/pg/lib/client.js generated vendored Normal file
View File

@ -0,0 +1,650 @@
'use strict'
const EventEmitter = require('events').EventEmitter
const utils = require('./utils')
const sasl = require('./crypto/sasl')
const TypeOverrides = require('./type-overrides')
const ConnectionParameters = require('./connection-parameters')
const Query = require('./query')
const defaults = require('./defaults')
const Connection = require('./connection')
const crypto = require('./crypto/utils')
class Client extends EventEmitter {
constructor(config) {
super()
this.connectionParameters = new ConnectionParameters(config)
this.user = this.connectionParameters.user
this.database = this.connectionParameters.database
this.port = this.connectionParameters.port
this.host = this.connectionParameters.host
// "hiding" the password so it doesn't show up in stack traces
// or if the client is console.logged
Object.defineProperty(this, 'password', {
configurable: true,
enumerable: false,
writable: true,
value: this.connectionParameters.password,
})
this.replication = this.connectionParameters.replication
const c = config || {}
this._Promise = c.Promise || global.Promise
this._types = new TypeOverrides(c.types)
this._ending = false
this._ended = false
this._connecting = false
this._connected = false
this._connectionError = false
this._queryable = true
this.enableChannelBinding = Boolean(c.enableChannelBinding) // set true to use SCRAM-SHA-256-PLUS when offered
this.connection =
c.connection ||
new Connection({
stream: c.stream,
ssl: this.connectionParameters.ssl,
keepAlive: c.keepAlive || false,
keepAliveInitialDelayMillis: c.keepAliveInitialDelayMillis || 0,
encoding: this.connectionParameters.client_encoding || 'utf8',
})
this.queryQueue = []
this.binary = c.binary || defaults.binary
this.processID = null
this.secretKey = null
this.ssl = this.connectionParameters.ssl || false
// As with Password, make SSL->Key (the private key) non-enumerable.
// It won't show up in stack traces
// or if the client is console.logged
if (this.ssl && this.ssl.key) {
Object.defineProperty(this.ssl, 'key', {
enumerable: false,
})
}
this._connectionTimeoutMillis = c.connectionTimeoutMillis || 0
}
_errorAllQueries(err) {
const enqueueError = (query) => {
process.nextTick(() => {
query.handleError(err, this.connection)
})
}
if (this.activeQuery) {
enqueueError(this.activeQuery)
this.activeQuery = null
}
this.queryQueue.forEach(enqueueError)
this.queryQueue.length = 0
}
_connect(callback) {
const self = this
const con = this.connection
this._connectionCallback = callback
if (this._connecting || this._connected) {
const err = new Error('Client has already been connected. You cannot reuse a client.')
process.nextTick(() => {
callback(err)
})
return
}
this._connecting = true
if (this._connectionTimeoutMillis > 0) {
this.connectionTimeoutHandle = setTimeout(() => {
con._ending = true
con.stream.destroy(new Error('timeout expired'))
}, this._connectionTimeoutMillis)
if (this.connectionTimeoutHandle.unref) {
this.connectionTimeoutHandle.unref()
}
}
if (this.host && this.host.indexOf('/') === 0) {
con.connect(this.host + '/.s.PGSQL.' + this.port)
} else {
con.connect(this.port, this.host)
}
// once connection is established send startup message
con.on('connect', function () {
if (self.ssl) {
con.requestSsl()
} else {
con.startup(self.getStartupConf())
}
})
con.on('sslconnect', function () {
con.startup(self.getStartupConf())
})
this._attachListeners(con)
con.once('end', () => {
const error = this._ending ? new Error('Connection terminated') : new Error('Connection terminated unexpectedly')
clearTimeout(this.connectionTimeoutHandle)
this._errorAllQueries(error)
this._ended = true
if (!this._ending) {
// if the connection is ended without us calling .end()
// on this client then we have an unexpected disconnection
// treat this as an error unless we've already emitted an error
// during connection.
if (this._connecting && !this._connectionError) {
if (this._connectionCallback) {
this._connectionCallback(error)
} else {
this._handleErrorEvent(error)
}
} else if (!this._connectionError) {
this._handleErrorEvent(error)
}
}
process.nextTick(() => {
this.emit('end')
})
})
}
connect(callback) {
if (callback) {
this._connect(callback)
return
}
return new this._Promise((resolve, reject) => {
this._connect((error) => {
if (error) {
reject(error)
} else {
resolve()
}
})
})
}
_attachListeners(con) {
// password request handling
con.on('authenticationCleartextPassword', this._handleAuthCleartextPassword.bind(this))
// password request handling
con.on('authenticationMD5Password', this._handleAuthMD5Password.bind(this))
// password request handling (SASL)
con.on('authenticationSASL', this._handleAuthSASL.bind(this))
con.on('authenticationSASLContinue', this._handleAuthSASLContinue.bind(this))
con.on('authenticationSASLFinal', this._handleAuthSASLFinal.bind(this))
con.on('backendKeyData', this._handleBackendKeyData.bind(this))
con.on('error', this._handleErrorEvent.bind(this))
con.on('errorMessage', this._handleErrorMessage.bind(this))
con.on('readyForQuery', this._handleReadyForQuery.bind(this))
con.on('notice', this._handleNotice.bind(this))
con.on('rowDescription', this._handleRowDescription.bind(this))
con.on('dataRow', this._handleDataRow.bind(this))
con.on('portalSuspended', this._handlePortalSuspended.bind(this))
con.on('emptyQuery', this._handleEmptyQuery.bind(this))
con.on('commandComplete', this._handleCommandComplete.bind(this))
con.on('parseComplete', this._handleParseComplete.bind(this))
con.on('copyInResponse', this._handleCopyInResponse.bind(this))
con.on('copyData', this._handleCopyData.bind(this))
con.on('notification', this._handleNotification.bind(this))
}
// TODO(bmc): deprecate pgpass "built in" integration since this.password can be a function
// it can be supplied by the user if required - this is a breaking change!
_checkPgPass(cb) {
const con = this.connection
if (typeof this.password === 'function') {
this._Promise
.resolve()
.then(() => this.password())
.then((pass) => {
if (pass !== undefined) {
if (typeof pass !== 'string') {
con.emit('error', new TypeError('Password must be a string'))
return
}
this.connectionParameters.password = this.password = pass
} else {
this.connectionParameters.password = this.password = null
}
cb()
})
.catch((err) => {
con.emit('error', err)
})
} else if (this.password !== null) {
cb()
} else {
try {
const pgPass = require('pgpass')
pgPass(this.connectionParameters, (pass) => {
if (undefined !== pass) {
this.connectionParameters.password = this.password = pass
}
cb()
})
} catch (e) {
this.emit('error', e)
}
}
}
_handleAuthCleartextPassword(msg) {
this._checkPgPass(() => {
this.connection.password(this.password)
})
}
_handleAuthMD5Password(msg) {
this._checkPgPass(async () => {
try {
const hashedPassword = await crypto.postgresMd5PasswordHash(this.user, this.password, msg.salt)
this.connection.password(hashedPassword)
} catch (e) {
this.emit('error', e)
}
})
}
_handleAuthSASL(msg) {
this._checkPgPass(() => {
try {
this.saslSession = sasl.startSession(msg.mechanisms, this.enableChannelBinding && this.connection.stream)
this.connection.sendSASLInitialResponseMessage(this.saslSession.mechanism, this.saslSession.response)
} catch (err) {
this.connection.emit('error', err)
}
})
}
async _handleAuthSASLContinue(msg) {
try {
await sasl.continueSession(
this.saslSession,
this.password,
msg.data,
this.enableChannelBinding && this.connection.stream
)
this.connection.sendSCRAMClientFinalMessage(this.saslSession.response)
} catch (err) {
this.connection.emit('error', err)
}
}
_handleAuthSASLFinal(msg) {
try {
sasl.finalizeSession(this.saslSession, msg.data)
this.saslSession = null
} catch (err) {
this.connection.emit('error', err)
}
}
_handleBackendKeyData(msg) {
this.processID = msg.processID
this.secretKey = msg.secretKey
}
_handleReadyForQuery(msg) {
if (this._connecting) {
this._connecting = false
this._connected = true
clearTimeout(this.connectionTimeoutHandle)
// process possible callback argument to Client#connect
if (this._connectionCallback) {
this._connectionCallback(null, this)
// remove callback for proper error handling
// after the connect event
this._connectionCallback = null
}
this.emit('connect')
}
const { activeQuery } = this
this.activeQuery = null
this.readyForQuery = true
if (activeQuery) {
activeQuery.handleReadyForQuery(this.connection)
}
this._pulseQueryQueue()
}
// if we receive an error event or error message
// during the connection process we handle it here
_handleErrorWhileConnecting(err) {
if (this._connectionError) {
// TODO(bmc): this is swallowing errors - we shouldn't do this
return
}
this._connectionError = true
clearTimeout(this.connectionTimeoutHandle)
if (this._connectionCallback) {
return this._connectionCallback(err)
}
this.emit('error', err)
}
// if we're connected and we receive an error event from the connection
// this means the socket is dead - do a hard abort of all queries and emit
// the socket error on the client as well
_handleErrorEvent(err) {
if (this._connecting) {
return this._handleErrorWhileConnecting(err)
}
this._queryable = false
this._errorAllQueries(err)
this.emit('error', err)
}
// handle error messages from the postgres backend
_handleErrorMessage(msg) {
if (this._connecting) {
return this._handleErrorWhileConnecting(msg)
}
const activeQuery = this.activeQuery
if (!activeQuery) {
this._handleErrorEvent(msg)
return
}
this.activeQuery = null
activeQuery.handleError(msg, this.connection)
}
_handleRowDescription(msg) {
// delegate rowDescription to active query
this.activeQuery.handleRowDescription(msg)
}
_handleDataRow(msg) {
// delegate dataRow to active query
this.activeQuery.handleDataRow(msg)
}
_handlePortalSuspended(msg) {
// delegate portalSuspended to active query
this.activeQuery.handlePortalSuspended(this.connection)
}
_handleEmptyQuery(msg) {
// delegate emptyQuery to active query
this.activeQuery.handleEmptyQuery(this.connection)
}
_handleCommandComplete(msg) {
if (this.activeQuery == null) {
const error = new Error('Received unexpected commandComplete message from backend.')
this._handleErrorEvent(error)
return
}
// delegate commandComplete to active query
this.activeQuery.handleCommandComplete(msg, this.connection)
}
_handleParseComplete() {
if (this.activeQuery == null) {
const error = new Error('Received unexpected parseComplete message from backend.')
this._handleErrorEvent(error)
return
}
// if a prepared statement has a name and properly parses
// we track that its already been executed so we don't parse
// it again on the same client
if (this.activeQuery.name) {
this.connection.parsedStatements[this.activeQuery.name] = this.activeQuery.text
}
}
_handleCopyInResponse(msg) {
this.activeQuery.handleCopyInResponse(this.connection)
}
_handleCopyData(msg) {
this.activeQuery.handleCopyData(msg, this.connection)
}
_handleNotification(msg) {
this.emit('notification', msg)
}
_handleNotice(msg) {
this.emit('notice', msg)
}
getStartupConf() {
const params = this.connectionParameters
const data = {
user: params.user,
database: params.database,
}
const appName = params.application_name || params.fallback_application_name
if (appName) {
data.application_name = appName
}
if (params.replication) {
data.replication = '' + params.replication
}
if (params.statement_timeout) {
data.statement_timeout = String(parseInt(params.statement_timeout, 10))
}
if (params.lock_timeout) {
data.lock_timeout = String(parseInt(params.lock_timeout, 10))
}
if (params.idle_in_transaction_session_timeout) {
data.idle_in_transaction_session_timeout = String(parseInt(params.idle_in_transaction_session_timeout, 10))
}
if (params.options) {
data.options = params.options
}
return data
}
cancel(client, query) {
if (client.activeQuery === query) {
const con = this.connection
if (this.host && this.host.indexOf('/') === 0) {
con.connect(this.host + '/.s.PGSQL.' + this.port)
} else {
con.connect(this.port, this.host)
}
// once connection is established send cancel message
con.on('connect', function () {
con.cancel(client.processID, client.secretKey)
})
} else if (client.queryQueue.indexOf(query) !== -1) {
client.queryQueue.splice(client.queryQueue.indexOf(query), 1)
}
}
setTypeParser(oid, format, parseFn) {
return this._types.setTypeParser(oid, format, parseFn)
}
getTypeParser(oid, format) {
return this._types.getTypeParser(oid, format)
}
// escapeIdentifier and escapeLiteral moved to utility functions & exported
// on PG
// re-exported here for backwards compatibility
escapeIdentifier(str) {
return utils.escapeIdentifier(str)
}
escapeLiteral(str) {
return utils.escapeLiteral(str)
}
_pulseQueryQueue() {
if (this.readyForQuery === true) {
this.activeQuery = this.queryQueue.shift()
if (this.activeQuery) {
this.readyForQuery = false
this.hasExecuted = true
const queryError = this.activeQuery.submit(this.connection)
if (queryError) {
process.nextTick(() => {
this.activeQuery.handleError(queryError, this.connection)
this.readyForQuery = true
this._pulseQueryQueue()
})
}
} else if (this.hasExecuted) {
this.activeQuery = null
this.emit('drain')
}
}
}
query(config, values, callback) {
// can take in strings, config object or query object
let query
let result
let readTimeout
let readTimeoutTimer
let queryCallback
if (config === null || config === undefined) {
throw new TypeError('Client was passed a null or undefined query')
} else if (typeof config.submit === 'function') {
readTimeout = config.query_timeout || this.connectionParameters.query_timeout
result = query = config
if (typeof values === 'function') {
query.callback = query.callback || values
}
} else {
readTimeout = config.query_timeout || this.connectionParameters.query_timeout
query = new Query(config, values, callback)
if (!query.callback) {
result = new this._Promise((resolve, reject) => {
query.callback = (err, res) => (err ? reject(err) : resolve(res))
}).catch((err) => {
// replace the stack trace that leads to `TCP.onStreamRead` with one that leads back to the
// application that created the query
Error.captureStackTrace(err)
throw err
})
}
}
if (readTimeout) {
queryCallback = query.callback
readTimeoutTimer = setTimeout(() => {
const error = new Error('Query read timeout')
process.nextTick(() => {
query.handleError(error, this.connection)
})
queryCallback(error)
// we already returned an error,
// just do nothing if query completes
query.callback = () => {}
// Remove from queue
const index = this.queryQueue.indexOf(query)
if (index > -1) {
this.queryQueue.splice(index, 1)
}
this._pulseQueryQueue()
}, readTimeout)
query.callback = (err, res) => {
clearTimeout(readTimeoutTimer)
queryCallback(err, res)
}
}
if (this.binary && !query.binary) {
query.binary = true
}
if (query._result && !query._result._types) {
query._result._types = this._types
}
if (!this._queryable) {
process.nextTick(() => {
query.handleError(new Error('Client has encountered a connection error and is not queryable'), this.connection)
})
return result
}
if (this._ending) {
process.nextTick(() => {
query.handleError(new Error('Client was closed and is not queryable'), this.connection)
})
return result
}
this.queryQueue.push(query)
this._pulseQueryQueue()
return result
}
ref() {
this.connection.ref()
}
unref() {
this.connection.unref()
}
end(cb) {
this._ending = true
// if we have never connected, then end is a noop, callback immediately
if (!this.connection._connecting || this._ended) {
if (cb) {
cb()
} else {
return this._Promise.resolve()
}
}
if (this.activeQuery || !this._queryable) {
// if we have an active query we need to force a disconnect
// on the socket - otherwise a hung query could block end forever
this.connection.stream.destroy()
} else {
this.connection.end()
}
if (cb) {
this.connection.once('end', cb)
} else {
return new this._Promise((resolve) => {
this.connection.once('end', resolve)
})
}
}
}
// expose a Query constructor
// (callers can reach it as Client.Query, re-exported by lib/index.js)
Client.Query = Query
module.exports = Client

167
node_modules/pg/lib/connection-parameters.js generated vendored Normal file
View File

@ -0,0 +1,167 @@
'use strict'
const dns = require('dns')
const defaults = require('./defaults')
const parse = require('pg-connection-string').parse // parses a connection string
// Resolve one configuration value with the precedence: explicit config entry,
// then environment variable (PG<KEY> by default, a named variable when given,
// or skipped entirely when envVar === false), then the library default.
const val = function (key, config, envVar) {
  let fromEnv = envVar
  if (fromEnv === undefined) {
    fromEnv = process.env['PG' + key.toUpperCase()]
  } else if (fromEnv !== false) {
    fromEnv = process.env[fromEnv]
  }
  // when envVar === false it stays falsy and the default wins below
  return config[key] || fromEnv || defaults[key]
}
// Map PGSSLMODE onto the client's ssl config shape: false to disable,
// true for the verifying modes, an object for no-verify, or the default.
const readSSLConfigFromEnvironment = function () {
  const mode = process.env.PGSSLMODE
  if (mode === 'disable') {
    return false
  }
  if (mode === 'no-verify') {
    return { rejectUnauthorized: false }
  }
  if (['prefer', 'require', 'verify-ca', 'verify-full'].includes(mode)) {
    return true
  }
  return defaults.ssl
}
// Render `value` as a single-quoted libpq string literal, escaping
// backslashes and embedded single quotes.
const quoteParamValue = function (value) {
  const text = '' + value
  const escaped = text.replace(/\\/g, '\\\\').replace(/'/g, "\\'")
  return `'${escaped}'`
}
// Append "name=quoted-value" to params, skipping null/undefined values.
const add = function (params, config, paramName) {
  const value = config[paramName]
  if (value === undefined || value === null) {
    return
  }
  params.push(paramName + '=' + quoteParamValue(value))
}
// Normalized connection configuration: merges an explicit config object (or a
// raw connection string), PG* environment variables, and library defaults.
class ConnectionParameters {
constructor(config) {
// if a string is passed, it is a raw connection string so we parse it into a config
config = typeof config === 'string' ? parse(config) : config || {}
// if the config has a connectionString defined, parse IT into the config we use
// this will override other default values with what is stored in connectionString
if (config.connectionString) {
config = Object.assign({}, config, parse(config.connectionString))
}
this.user = val('user', config)
this.database = val('database', config)
// Postgres convention: fall back to the user name as the database name.
if (this.database === undefined) {
this.database = this.user
}
this.port = parseInt(val('port', config), 10)
this.host = val('host', config)
// "hiding" the password so it doesn't show up in stack traces
// or if the client is console.logged
Object.defineProperty(this, 'password', {
configurable: true,
enumerable: false,
writable: true,
value: val('password', config),
})
this.binary = val('binary', config)
this.options = val('options', config)
// explicit config.ssl wins; otherwise consult PGSSLMODE via the helper
this.ssl = typeof config.ssl === 'undefined' ? readSSLConfigFromEnvironment() : config.ssl
if (typeof this.ssl === 'string') {
if (this.ssl === 'true') {
this.ssl = true
}
}
// support passing in ssl=no-verify via connection string
if (this.ssl === 'no-verify') {
this.ssl = { rejectUnauthorized: false }
}
// keep the TLS private key out of enumeration (console.log, serialization)
if (this.ssl && this.ssl.key) {
Object.defineProperty(this.ssl, 'key', {
enumerable: false,
})
}
this.client_encoding = val('client_encoding', config)
this.replication = val('replication', config)
// a domain socket begins with '/'
this.isDomainSocket = !(this.host || '').indexOf('/')
this.application_name = val('application_name', config, 'PGAPPNAME')
this.fallback_application_name = val('fallback_application_name', config, false)
this.statement_timeout = val('statement_timeout', config, false)
this.lock_timeout = val('lock_timeout', config, false)
this.idle_in_transaction_session_timeout = val('idle_in_transaction_session_timeout', config, false)
this.query_timeout = val('query_timeout', config, false)
// connect_timeout is expressed in whole seconds (libpq convention)
if (config.connectionTimeoutMillis === undefined) {
this.connect_timeout = process.env.PGCONNECT_TIMEOUT || 0
} else {
this.connect_timeout = Math.floor(config.connectionTimeoutMillis / 1000)
}
if (config.keepAlive === false) {
this.keepalives = 0
} else if (config.keepAlive === true) {
this.keepalives = 1
}
if (typeof config.keepAliveInitialDelayMillis === 'number') {
this.keepalives_idle = Math.floor(config.keepAliveInitialDelayMillis / 1000)
}
}
// Build a libpq-style "key=value ..." connection string and deliver it via
// cb(err, conString); resolves host to an address (hostaddr) over DNS unless
// the host is a unix domain socket.
getLibpqConnectionString(cb) {
const params = []
add(params, this, 'user')
add(params, this, 'password')
add(params, this, 'port')
add(params, this, 'application_name')
add(params, this, 'fallback_application_name')
add(params, this, 'connect_timeout')
add(params, this, 'options')
// ssl may be a bare mode string or an object carrying ssl* settings
const ssl = typeof this.ssl === 'object' ? this.ssl : this.ssl ? { sslmode: this.ssl } : {}
add(params, ssl, 'sslmode')
add(params, ssl, 'sslca')
add(params, ssl, 'sslkey')
add(params, ssl, 'sslcert')
add(params, ssl, 'sslrootcert')
if (this.database) {
params.push('dbname=' + quoteParamValue(this.database))
}
if (this.replication) {
params.push('replication=' + quoteParamValue(this.replication))
}
if (this.host) {
params.push('host=' + quoteParamValue(this.host))
}
// NOTE(review): domain sockets return before client_encoding is appended —
// presumably intentional, but worth confirming upstream.
if (this.isDomainSocket) {
return cb(null, params.join(' '))
}
if (this.client_encoding) {
params.push('client_encoding=' + quoteParamValue(this.client_encoding))
}
dns.lookup(this.host, function (err, address) {
if (err) return cb(err, null)
params.push('hostaddr=' + quoteParamValue(address))
return cb(null, params.join(' '))
})
}
}
module.exports = ConnectionParameters

222
node_modules/pg/lib/connection.js generated vendored Normal file
View File

@ -0,0 +1,222 @@
'use strict'
const EventEmitter = require('events').EventEmitter
const { parse, serialize } = require('pg-protocol')
const { getStream, getSecureStream } = require('./stream')
// Pre-serialized protocol messages that never change, built once per process.
const flushBuffer = serialize.flush()
const syncBuffer = serialize.sync()
const endBuffer = serialize.end()
// TODO(bmc) support binary mode at some point
// Low-level wire connection to the backend: owns the socket, serializes
// outgoing protocol messages, and re-emits parsed incoming messages as
// events named after the message type.
class Connection extends EventEmitter {
constructor(config) {
super()
config = config || {}
// accept a pre-made stream, or a factory that builds one from the config
this.stream = config.stream || getStream(config.ssl)
if (typeof this.stream === 'function') {
this.stream = this.stream(config)
}
this._keepAlive = config.keepAlive
this._keepAliveInitialDelayMillis = config.keepAliveInitialDelayMillis
this.lastBuffer = false
this.parsedStatements = {}
this.ssl = config.ssl || false
this._ending = false
this._emitMessage = false
const self = this
// only pay for the catch-all 'message' emit once someone listens for it
this.on('newListener', function (eventName) {
if (eventName === 'message') {
self._emitMessage = true
}
})
}
// Open the socket and, when ssl is configured, upgrade to TLS after the
// server answers the SSLRequest sent via requestSsl().
connect(port, host) {
const self = this
this._connecting = true
this.stream.setNoDelay(true)
this.stream.connect(port, host)
this.stream.once('connect', function () {
if (self._keepAlive) {
self.stream.setKeepAlive(true, self._keepAliveInitialDelayMillis)
}
self.emit('connect')
})
const reportStreamError = function (error) {
// errors about disconnections should be ignored during disconnect
if (self._ending && (error.code === 'ECONNRESET' || error.code === 'EPIPE')) {
return
}
self.emit('error', error)
}
this.stream.on('error', reportStreamError)
this.stream.on('close', function () {
self.emit('end')
})
if (!this.ssl) {
return this.attachListeners(this.stream)
}
// single-byte answer to the SSLRequest determines whether TLS can proceed
this.stream.once('data', function (buffer) {
const responseCode = buffer.toString('utf8')
switch (responseCode) {
case 'S': // Server supports SSL connections, continue with a secure connection
break
case 'N': // Server does not support SSL connections
self.stream.end()
return self.emit('error', new Error('The server does not support SSL connections'))
default:
// Any other response byte, including 'E' (ErrorResponse) indicating a server error
self.stream.end()
return self.emit('error', new Error('There was an error establishing an SSL connection'))
}
const options = {
socket: self.stream,
}
if (self.ssl !== true) {
Object.assign(options, self.ssl)
if ('key' in self.ssl) {
options.key = self.ssl.key
}
}
const net = require('net')
// only set the SNI servername when host is a name, not an IP literal
if (net.isIP && net.isIP(host) === 0) {
options.servername = host
}
try {
self.stream = getSecureStream(options)
} catch (err) {
return self.emit('error', err)
}
self.attachListeners(self.stream)
self.stream.on('error', reportStreamError)
self.emit('sslconnect')
})
}
// Route every parsed protocol message to an event named after the message.
attachListeners(stream) {
parse(stream, (msg) => {
const eventName = msg.name === 'error' ? 'errorMessage' : msg.name
if (this._emitMessage) {
this.emit('message', msg)
}
this.emit(eventName, msg)
})
}
requestSsl() {
this.stream.write(serialize.requestSsl())
}
startup(config) {
this.stream.write(serialize.startup(config))
}
cancel(processID, secretKey) {
this._send(serialize.cancel(processID, secretKey))
}
password(password) {
this._send(serialize.password(password))
}
sendSASLInitialResponseMessage(mechanism, initialResponse) {
this._send(serialize.sendSASLInitialResponseMessage(mechanism, initialResponse))
}
sendSCRAMClientFinalMessage(additionalData) {
this._send(serialize.sendSCRAMClientFinalMessage(additionalData))
}
// Write a serialized message; silently drops it when the socket is gone.
_send(buffer) {
if (!this.stream.writable) {
return false
}
return this.stream.write(buffer)
}
query(text) {
this._send(serialize.query(text))
}
// send parse message
parse(query) {
this._send(serialize.parse(query))
}
// send bind message
bind(config) {
this._send(serialize.bind(config))
}
// send execute message
execute(config) {
this._send(serialize.execute(config))
}
flush() {
if (this.stream.writable) {
this.stream.write(flushBuffer)
}
}
sync() {
this._ending = true
this._send(syncBuffer)
}
ref() {
this.stream.ref()
}
unref() {
this.stream.unref()
}
// Send the Terminate message (when possible), then close the socket.
end() {
// 0x58 = 'X'
this._ending = true
if (!this._connecting || !this.stream.writable) {
this.stream.end()
return
}
return this.stream.write(endBuffer, () => {
this.stream.end()
})
}
close(msg) {
this._send(serialize.close(msg))
}
describe(msg) {
this._send(serialize.describe(msg))
}
sendCopyFromChunk(chunk) {
this._send(serialize.copyData(chunk))
}
endCopyFrom() {
this._send(serialize.copyDone())
}
sendCopyFail(msg) {
this._send(serialize.copyFail(msg))
}
}
module.exports = Connection

122
node_modules/pg/lib/crypto/cert-signatures.js generated vendored Normal file
View File

@ -0,0 +1,122 @@
// Build (but do not throw) a descriptive parse error that embeds the
// offending certificate as base64 for debugging.
function x509Error(msg, cert) {
  const certB64 = cert.toString('base64')
  return new Error(`SASL channel binding: ${msg} when parsing public certificate ${certB64}`)
}
// Decode a DER length field at `index`; returns the length and the index of
// the first byte after the length field.
function readASN1Length(data, index) {
  const first = data[index++]
  // short form: a single byte below 0x80 is the length itself
  if (first < 0x80) {
    return { length: first, index }
  }
  // long form: the low 7 bits count the big-endian length bytes that follow
  const numBytes = first & 0x7f
  if (numBytes > 4) throw x509Error('bad length', data)
  let length = 0
  for (let i = 0; i < numBytes; i++) {
    length = (length << 8) | data[index++]
  }
  return { length, index }
}
// Decode a DER OBJECT IDENTIFIER at `index`, returning the dotted-decimal
// OID string plus the index just past it.
function readASN1OID(data, index) {
if (data[index++] !== 0x6) throw x509Error('non-OID data', data) // 6 = OID
const { length: OIDLength, index: indexAfterOIDLength } = readASN1Length(data, index)
index = indexAfterOIDLength
const lastIndex = index + OIDLength
// the first byte packs the first two OID components: 40 * first + second
const byte1 = data[index++]
let oid = ((byte1 / 40) >> 0) + '.' + (byte1 % 40)
while (index < lastIndex) {
// loop over numbers in OID
let value = 0
while (index < lastIndex) {
// loop over bytes in number
// base-128 continuation encoding: high bit set on all but the last byte
const nextByte = data[index++]
value = (value << 7) | (nextByte & 0x7f)
if (nextByte < 0x80) break
}
oid += '.' + value
}
return { oid, index }
}
// Require a DER SEQUENCE (tag 0x30) at `index` and decode its length field.
function expectASN1Seq(data, index) {
  const tag = data[index++]
  if (tag !== 0x30) throw x509Error('non-sequence data', data) // 30 = Sequence
  return readASN1Length(data, index)
}
// Determine the hash algorithm named by a DER certificate's signatureAlgorithm
// field, used as the tls-server-end-point channel-binding digest.
function signatureAlgorithmHashFromCertificate(data, index) {
// read this thread: https://www.postgresql.org/message-id/17760-b6c61e752ec07060%40postgresql.org
if (index === undefined) index = 0
index = expectASN1Seq(data, index).index
const { length: certInfoLength, index: indexAfterCertInfoLength } = expectASN1Seq(data, index)
index = indexAfterCertInfoLength + certInfoLength // skip over certificate info
index = expectASN1Seq(data, index).index // skip over signature length field
const { oid, index: indexAfterOID } = readASN1OID(data, index)
switch (oid) {
// RSA
case '1.2.840.113549.1.1.4':
return 'MD5'
case '1.2.840.113549.1.1.5':
return 'SHA-1'
case '1.2.840.113549.1.1.11':
return 'SHA-256'
case '1.2.840.113549.1.1.12':
return 'SHA-384'
case '1.2.840.113549.1.1.13':
return 'SHA-512'
case '1.2.840.113549.1.1.14':
return 'SHA-224'
case '1.2.840.113549.1.1.15':
return 'SHA512-224'
case '1.2.840.113549.1.1.16':
return 'SHA512-256'
// ECDSA
case '1.2.840.10045.4.1':
return 'SHA-1'
case '1.2.840.10045.4.3.1':
return 'SHA-224'
case '1.2.840.10045.4.3.2':
return 'SHA-256'
case '1.2.840.10045.4.3.3':
return 'SHA-384'
case '1.2.840.10045.4.3.4':
return 'SHA-512'
// RSASSA-PSS: hash is indicated separately
case '1.2.840.113549.1.1.10': {
// drill into the AlgorithmIdentifier parameters to find the hash OID
index = indexAfterOID
index = expectASN1Seq(data, index).index
if (data[index++] !== 0xa0) throw x509Error('non-tag data', data) // a0 = constructed tag 0
index = readASN1Length(data, index).index // skip over tag length field
index = expectASN1Seq(data, index).index // skip over sequence length field
const { oid: hashOID } = readASN1OID(data, index)
switch (hashOID) {
// standalone hash OIDs
case '1.2.840.113549.2.5':
return 'MD5'
case '1.3.14.3.2.26':
return 'SHA-1'
case '2.16.840.1.101.3.4.2.1':
return 'SHA-256'
case '2.16.840.1.101.3.4.2.2':
return 'SHA-384'
case '2.16.840.1.101.3.4.2.3':
return 'SHA-512'
}
throw x509Error('unknown hash OID ' + hashOID, data)
}
// Ed25519 -- see https: return//github.com/openssl/openssl/issues/15477
case '1.3.101.110':
case '1.3.101.112': // ph
return 'SHA-512'
// Ed448 -- still not in pg 17.2 (if supported, digest would be SHAKE256 x 64 bytes)
case '1.3.101.111':
case '1.3.101.113': // ph
throw x509Error('Ed448 certificate channel binding is not currently supported by Postgres')
}
throw x509Error('unknown OID ' + oid, data)
}
module.exports = { signatureAlgorithmHashFromCertificate }

212
node_modules/pg/lib/crypto/sasl.js generated vendored Normal file
View File

@ -0,0 +1,212 @@
'use strict'
const crypto = require('./utils')
const { signatureAlgorithmHashFromCertificate } = require('./cert-signatures')
// Begin a SCRAM session: pick the strongest mutually supported mechanism,
// generate the client nonce, and build the SASL initial response.
function startSession(mechanisms, stream) {
  // with a TLS stream available, prefer the channel-binding variant
  const candidates = stream ? ['SCRAM-SHA-256-PLUS', 'SCRAM-SHA-256'] : ['SCRAM-SHA-256']
  const mechanism = candidates.find((candidate) => mechanisms.includes(candidate))
  if (!mechanism) {
    throw new Error('SASL: Only mechanism(s) ' + candidates.join(' and ') + ' are supported')
  }
  if (mechanism === 'SCRAM-SHA-256-PLUS' && typeof stream.getPeerCertificate !== 'function') {
    // this should never happen if we are really talking to a Postgres server
    throw new Error('SASL: Mechanism SCRAM-SHA-256-PLUS requires a certificate')
  }
  const clientNonce = crypto.randomBytes(18).toString('base64')
  let gs2Header
  if (mechanism === 'SCRAM-SHA-256-PLUS') {
    gs2Header = 'p=tls-server-end-point'
  } else {
    gs2Header = stream ? 'y' : 'n'
  }
  return {
    mechanism,
    clientNonce,
    response: gs2Header + ',,n=*,r=' + clientNonce,
    message: 'SASLInitialResponse',
  }
}
// Second SCRAM step: validate the server-first-message, compute the client
// proof, and record the expected server signature for finalizeSession().
async function continueSession(session, password, serverData, stream) {
if (session.message !== 'SASLInitialResponse') {
throw new Error('SASL: Last message was not SASLInitialResponse')
}
if (typeof password !== 'string') {
throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: client password must be a string')
}
if (password === '') {
throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: client password must be a non-empty string')
}
if (typeof serverData !== 'string') {
throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: serverData must be a string')
}
const sv = parseServerFirstMessage(serverData)
// the server nonce must extend (not merely equal) the client nonce
if (!sv.nonce.startsWith(session.clientNonce)) {
throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: server nonce does not start with client nonce')
} else if (sv.nonce.length === session.clientNonce.length) {
throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: server nonce is too short')
}
const clientFirstMessageBare = 'n=*,r=' + session.clientNonce
const serverFirstMessage = 'r=' + sv.nonce + ',s=' + sv.salt + ',i=' + sv.iteration
// without channel binding:
let channelBinding = stream ? 'eSws' : 'biws' // 'y,,' or 'n,,', base64-encoded
// override if channel binding is in use:
if (session.mechanism === 'SCRAM-SHA-256-PLUS') {
const peerCert = stream.getPeerCertificate().raw
let hashName = signatureAlgorithmHashFromCertificate(peerCert)
// weak signature hashes are upgraded to SHA-256 for the certificate digest
if (hashName === 'MD5' || hashName === 'SHA-1') hashName = 'SHA-256'
const certHash = await crypto.hashByName(hashName, peerCert)
const bindingData = Buffer.concat([Buffer.from('p=tls-server-end-point,,'), Buffer.from(certHash)])
channelBinding = bindingData.toString('base64')
}
const clientFinalMessageWithoutProof = 'c=' + channelBinding + ',r=' + sv.nonce
const authMessage = clientFirstMessageBare + ',' + serverFirstMessage + ',' + clientFinalMessageWithoutProof
const saltBytes = Buffer.from(sv.salt, 'base64')
const saltedPassword = await crypto.deriveKey(password, saltBytes, sv.iteration)
// SCRAM key hierarchy: ClientKey/StoredKey produce the proof; ServerKey is
// kept to verify the server's signature later.
const clientKey = await crypto.hmacSha256(saltedPassword, 'Client Key')
const storedKey = await crypto.sha256(clientKey)
const clientSignature = await crypto.hmacSha256(storedKey, authMessage)
const clientProof = xorBuffers(Buffer.from(clientKey), Buffer.from(clientSignature)).toString('base64')
const serverKey = await crypto.hmacSha256(saltedPassword, 'Server Key')
const serverSignatureBytes = await crypto.hmacSha256(serverKey, authMessage)
session.message = 'SASLResponse'
session.serverSignature = Buffer.from(serverSignatureBytes).toString('base64')
session.response = clientFinalMessageWithoutProof + ',p=' + clientProof
}
// Final SCRAM step: check that the server proved knowledge of the password
// by sending the signature computed during continueSession().
function finalizeSession(session, serverData) {
  if (session.message !== 'SASLResponse') {
    throw new Error('SASL: Last message was not SASLResponse')
  }
  if (typeof serverData !== 'string') {
    throw new Error('SASL: SCRAM-SERVER-FINAL-MESSAGE: serverData must be a string')
  }
  const parsed = parseServerFinalMessage(serverData)
  if (parsed.serverSignature !== session.serverSignature) {
    throw new Error('SASL: SCRAM-SERVER-FINAL-MESSAGE: server signature does not match')
  }
}
/**
 * printable = %x21-2B / %x2D-7E
 * ;; Printable ASCII except ",".
 * ;; Note that any "printable" is also
 * ;; a valid "value".
 */
function isPrintableChars(text) {
  if (typeof text !== 'string') {
    throw new TypeError('SASL: text must be a string')
  }
  for (let i = 0; i < text.length; i++) {
    const code = text.charCodeAt(i)
    const printable = (code >= 0x21 && code <= 0x2b) || (code >= 0x2d && code <= 0x7e)
    if (!printable) {
      return false
    }
  }
  return true
}
/**
 * base64-char = ALPHA / DIGIT / "/" / "+"
 *
 * base64-4 = 4base64-char
 *
 * base64-3 = 3base64-char "="
 *
 * base64-2 = 2base64-char "=="
 *
 * base64 = *base64-4 [base64-3 / base64-2]
 */
function isBase64(text) {
  // complete 4-char groups, optionally ending in a correctly padded group
  const base64Shape = /^(?:[a-zA-Z0-9+/]{4})*(?:[a-zA-Z0-9+/]{2}==|[a-zA-Z0-9+/]{3}=)?$/
  return base64Shape.test(text)
}
// Split a SCRAM message of comma-separated "X=value" pairs into a Map from
// the single-character attribute name to its (possibly '='-containing) value.
function parseAttributePairs(text) {
  if (typeof text !== 'string') {
    throw new TypeError('SASL: attribute pairs text must be a string')
  }
  const pairs = new Map()
  for (const attrValue of text.split(',')) {
    if (!/^.=/.test(attrValue)) {
      throw new Error('SASL: Invalid attribute pair entry')
    }
    pairs.set(attrValue[0], attrValue.substring(2))
  }
  return pairs
}
// Parse and validate the SCRAM server-first-message: a printable nonce, a
// base64 salt, and a positive integer iteration count.
function parseServerFirstMessage(data) {
  const attrPairs = parseAttributePairs(data)
  const nonce = attrPairs.get('r')
  if (!nonce) {
    throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: nonce missing')
  }
  if (!isPrintableChars(nonce)) {
    throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: nonce must only contain printable characters')
  }
  const salt = attrPairs.get('s')
  if (!salt) {
    throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: salt missing')
  }
  if (!isBase64(salt)) {
    throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: salt must be base64')
  }
  const iterationText = attrPairs.get('i')
  if (!iterationText) {
    throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: iteration missing')
  }
  if (!/^[1-9][0-9]*$/.test(iterationText)) {
    throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: invalid iteration count')
  }
  return { nonce, salt, iteration: parseInt(iterationText, 10) }
}
// Parse the SCRAM server-final-message and extract its base64 server signature.
function parseServerFinalMessage(serverData) {
  const attrPairs = parseAttributePairs(serverData)
  const serverSignature = attrPairs.get('v')
  if (!serverSignature) {
    throw new Error('SASL: SCRAM-SERVER-FINAL-MESSAGE: server signature is missing')
  }
  if (!isBase64(serverSignature)) {
    throw new Error('SASL: SCRAM-SERVER-FINAL-MESSAGE: server signature must be base64')
  }
  return { serverSignature }
}
// Byte-wise XOR of two equal-length, non-empty Buffers (used to combine the
// client key and client signature into the SCRAM proof).
function xorBuffers(a, b) {
  if (!Buffer.isBuffer(a)) {
    throw new TypeError('first argument must be a Buffer')
  }
  if (!Buffer.isBuffer(b)) {
    throw new TypeError('second argument must be a Buffer')
  }
  if (a.length !== b.length) {
    throw new Error('Buffer lengths must match')
  }
  if (a.length === 0) {
    throw new Error('Buffers cannot be empty')
  }
  const out = Buffer.alloc(a.length)
  for (let i = 0; i < a.length; i++) {
    out[i] = a[i] ^ b[i]
  }
  return out
}
// Public SCRAM session lifecycle; the parsers and helpers stay module-private.
module.exports = {
startSession,
continueSession,
finalizeSession,
}

43
node_modules/pg/lib/crypto/utils-legacy.js generated vendored Normal file
View File

@ -0,0 +1,43 @@
'use strict'
// This file contains crypto utility functions for versions of Node.js < 15.0.0,
// which does not support the WebCrypto.subtle API.
const nodeCrypto = require('crypto')
// Hex-encoded MD5 digest of a UTF-8 string.
function md5(string) {
  const hash = nodeCrypto.createHash('md5')
  hash.update(string, 'utf-8')
  return hash.digest('hex')
}
// See AuthenticationMD5Password at https://www.postgresql.org/docs/current/static/protocol-flow.html
// Produces 'md5' + md5(md5(password + user) + salt), hex-encoded.
function postgresMd5PasswordHash(user, password, salt) {
  const firstPass = md5(password + user)
  const secondPass = md5(Buffer.concat([Buffer.from(firstPass), salt]))
  return 'md5' + secondPass
}
// SHA-256 digest of `text`, returned as a Buffer.
function sha256(text) {
  const hasher = nodeCrypto.createHash('sha256')
  return hasher.update(text).digest()
}
// Digest `text` with a WebCrypto-style hash name, translated to Node's naming.
function hashByName(hashName, text) {
  const nodeName = hashName.replace(/(\D)-/, '$1') // e.g. SHA-256 -> SHA256
  return nodeCrypto.createHash(nodeName).update(text).digest()
}
// HMAC-SHA-256 of `msg` under `key`, returned as a Buffer.
function hmacSha256(key, msg) {
  const mac = nodeCrypto.createHmac('sha256', key)
  mac.update(msg)
  return mac.digest()
}
// PBKDF2-SHA256 with a 32-byte output; async to match the WebCrypto variant.
async function deriveKey(password, salt, iterations) {
  const keyLengthBytes = 32
  return nodeCrypto.pbkdf2Sync(password, salt, iterations, keyLengthBytes, 'sha256')
}
// Legacy (pre-WebCrypto) implementations; same surface as utils-webcrypto.js.
module.exports = {
postgresMd5PasswordHash,
randomBytes: nodeCrypto.randomBytes,
deriveKey,
sha256,
hashByName,
hmacSha256,
md5,
}

89
node_modules/pg/lib/crypto/utils-webcrypto.js generated vendored Normal file
View File

@ -0,0 +1,89 @@
const nodeCrypto = require('crypto')
// Exported up front; the function declarations below are hoisted.
module.exports = {
postgresMd5PasswordHash,
randomBytes,
deriveKey,
sha256,
hashByName,
hmacSha256,
md5,
}
/**
 * The Web Crypto API - grabbed from the Node.js library or the global
 * @type Crypto
 */
// eslint-disable-next-line no-undef
const webCrypto = nodeCrypto.webcrypto || globalThis.crypto
/**
 * The SubtleCrypto API for low level crypto operations.
 * @type SubtleCrypto
 */
const subtleCrypto = webCrypto.subtle
// shared UTF-8 encoder for the string -> Uint8Array conversions below
const textEncoder = new TextEncoder()
/**
 * Fill a freshly allocated Buffer of `length` bytes with cryptographically
 * secure random values.
 * @param {number} length
 * @returns {Buffer}
 */
function randomBytes(length) {
  const buffer = Buffer.alloc(length)
  return webCrypto.getRandomValues(buffer)
}
// Hex-encoded MD5 digest; prefers Node's native hash, falling back to
// WebCrypto for non-Node runtimes.
async function md5(string) {
  try {
    return nodeCrypto.createHash('md5').update(string, 'utf-8').digest('hex')
  } catch (e) {
    // `createHash()` failed so we are probably not in Node.js, use the WebCrypto API instead.
    // Note that the MD5 algorithm on WebCrypto is not available in Node.js.
    // This is why we cannot just use WebCrypto in all environments.
    const data = typeof string === 'string' ? textEncoder.encode(string) : string
    const digest = await subtleCrypto.digest('MD5', data)
    const bytes = Array.from(new Uint8Array(digest))
    return bytes.map((b) => b.toString(16).padStart(2, '0')).join('')
  }
}
// See AuthenticationMD5Password at https://www.postgresql.org/docs/current/static/protocol-flow.html
// Produces 'md5' + md5(md5(password + user) + salt), hex-encoded.
async function postgresMd5PasswordHash(user, password, salt) {
  const innerDigest = await md5(password + user)
  const outerDigest = await md5(Buffer.concat([Buffer.from(innerDigest), salt]))
  return 'md5' + outerDigest
}
/**
 * Create a SHA-256 digest of the given data
 * @param {Buffer|Uint8Array} text - data to hash
 * @returns {Promise<ArrayBuffer>}
 */
async function sha256(text) {
  const digest = await subtleCrypto.digest('SHA-256', text)
  return digest
}
// Digest `text` with the WebCrypto hash named `hashName` (e.g. 'SHA-256').
async function hashByName(hashName, text) {
  const digest = await subtleCrypto.digest(hashName, text)
  return digest
}
/**
 * Sign the message with the given key using HMAC-SHA-256.
 * @param {ArrayBuffer} keyBuffer
 * @param {string} msg
 * @returns {Promise<ArrayBuffer>}
 */
async function hmacSha256(keyBuffer, msg) {
  const algorithm = { name: 'HMAC', hash: 'SHA-256' }
  const key = await subtleCrypto.importKey('raw', keyBuffer, algorithm, false, ['sign'])
  const signature = await subtleCrypto.sign('HMAC', key, textEncoder.encode(msg))
  return signature
}
/**
 * Derive a 256-bit key from the password and salt via PBKDF2-SHA256.
 * @param {string} password
 * @param {Uint8Array} salt
 * @param {number} iterations
 * @returns {Promise<ArrayBuffer>} 32 bytes of derived key material
 */
async function deriveKey(password, salt, iterations) {
  const key = await subtleCrypto.importKey('raw', textEncoder.encode(password), 'PBKDF2', false, ['deriveBits'])
  const params = { name: 'PBKDF2', hash: 'SHA-256', salt: salt, iterations: iterations }
  // deriveBits(algorithm, baseKey, length) takes no key-usages argument; the
  // stray 4th parameter previously passed here was silently ignored.
  return await subtleCrypto.deriveBits(params, key, 32 * 8)
}

9
node_modules/pg/lib/crypto/utils.js generated vendored Normal file
View File

@ -0,0 +1,9 @@
'use strict'
// Node < 15 lacks the WebCrypto `subtle` API, so choose the crypto
// implementation once at require time based on the running major version.
const useLegacyCrypto = parseInt(process.versions && process.versions.node && process.versions.node.split('.')[0]) < 15
if (useLegacyCrypto) {
// We are on an old version of Node.js that requires legacy crypto utilities.
module.exports = require('./utils-legacy')
} else {
module.exports = require('./utils-webcrypto')
}

84
node_modules/pg/lib/defaults.js generated vendored Normal file
View File

@ -0,0 +1,84 @@
'use strict'
module.exports = {
// database host. defaults to localhost
host: 'localhost',
// database user's name
user: process.platform === 'win32' ? process.env.USERNAME : process.env.USER,
// name of database to connect
database: undefined,
// database user's password
password: null,
// a Postgres connection string to be used instead of setting individual connection items
// NOTE: Setting this value will cause it to override any other value (such as database or user) defined
// in the defaults object.
connectionString: undefined,
// database port
port: 5432,
// number of rows to return at a time from a prepared statement's
// portal. 0 will return all rows at once
rows: 0,
// binary result mode
binary: false,
// Connection pool options - see https://github.com/brianc/node-pg-pool
// number of connections to use in connection pool
// 0 will disable connection pooling
max: 10,
// max milliseconds a client can go unused before it is removed
// from the pool and destroyed
idleTimeoutMillis: 30000,
client_encoding: '',
ssl: false,
application_name: undefined,
fallback_application_name: undefined,
options: undefined,
parseInputDatesAsUTC: false,
// max milliseconds any query using this connection will execute for before timing out in error.
// false=unlimited
statement_timeout: false,
// Abort any statement that waits longer than the specified duration in milliseconds while attempting to acquire a lock.
// false=unlimited
lock_timeout: false,
// Terminate any session with an open transaction that has been idle for longer than the specified duration in milliseconds
// false=unlimited
idle_in_transaction_session_timeout: false,
// max milliseconds to wait for query to complete (client side)
query_timeout: false,
connect_timeout: 0,
keepalives: 1,
keepalives_idle: 0,
}
const pgTypes = require('pg-types')
// save default parsers
const parseBigInteger = pgTypes.getTypeParser(20, 'text')
const parseBigIntegerArray = pgTypes.getTypeParser(1016, 'text')
// parse int8 so you can get your count values as actual numbers
module.exports.__defineSetter__('parseInt8', function (val) {
pgTypes.setTypeParser(20, 'text', val ? pgTypes.getTypeParser(23, 'text') : parseBigInteger)
pgTypes.setTypeParser(1016, 'text', val ? pgTypes.getTypeParser(1007, 'text') : parseBigIntegerArray)
})

64
node_modules/pg/lib/index.js generated vendored Normal file
View File

@ -0,0 +1,64 @@
'use strict'
const Client = require('./client')
const defaults = require('./defaults')
const Connection = require('./connection')
const Result = require('./result')
const utils = require('./utils')
const Pool = require('pg-pool')
const TypeOverrides = require('./type-overrides')
const { DatabaseError } = require('pg-protocol')
const { escapeIdentifier, escapeLiteral } = require('./utils')
// Build a Pool subclass pre-bound to the given Client implementation, so
// `new pg.Pool(options)` needs no explicit client constructor argument.
const poolFactory = (Client) => {
  class BoundPool extends Pool {
    constructor(options) {
      super(options, Client)
    }
  }
  return BoundPool
}
// Aggregate export object: wires a Client implementation (JS or native)
// together with the bound Pool, type system, and helper utilities.
const PG = function (clientConstructor) {
this.defaults = defaults
this.Client = clientConstructor
this.Query = this.Client.Query
this.Pool = poolFactory(this.Client)
this._pools = []
this.Connection = Connection
this.types = require('pg-types')
this.DatabaseError = DatabaseError
this.TypeOverrides = TypeOverrides
this.escapeIdentifier = escapeIdentifier
this.escapeLiteral = escapeLiteral
this.Result = Result
this.utils = utils
}
// NODE_PG_FORCE_NATIVE makes pg-native the default implementation; otherwise
// export the JS client and expose the native one lazily via a getter.
if (typeof process.env.NODE_PG_FORCE_NATIVE !== 'undefined') {
module.exports = new PG(require('./native'))
} else {
module.exports = new PG(Client)
// lazy require native module...the native module may not have installed
Object.defineProperty(module.exports, 'native', {
configurable: true,
enumerable: false,
get() {
let native = null
try {
native = new PG(require('./native'))
} catch (err) {
// only swallow the expected missing-module error; anything else is real
if (err.code !== 'MODULE_NOT_FOUND') {
throw err
}
}
// overwrite module.exports.native so that getter is never called again
Object.defineProperty(module.exports, 'native', {
value: native,
})
return native
},
})
}

308
node_modules/pg/lib/native/client.js generated vendored Normal file
View File

@ -0,0 +1,308 @@
'use strict'
// Native (libpq-backed) client implementation; requires the optional
// pg-native package to be installed.
// eslint-disable-next-line
var Native
// eslint-disable-next-line no-useless-catch
try {
// Wrap this `require()` in a try-catch to avoid upstream bundlers from complaining that this might not be available since it is an optional import
Native = require('pg-native')
} catch (e) {
throw e
}
const TypeOverrides = require('../type-overrides')
const EventEmitter = require('events').EventEmitter
const util = require('util')
const ConnectionParameters = require('../connection-parameters')
const NativeQuery = require('./query')
// Constructor for a libpq-backed client; mirrors the JS Client's public
// surface (user/database/host/port, hidden password, Query constructor).
const Client = (module.exports = function (config) {
EventEmitter.call(this)
config = config || {}
this._Promise = config.Promise || global.Promise
this._types = new TypeOverrides(config.types)
this.native = new Native({
types: this._types,
})
this._queryQueue = []
this._ending = false
this._connecting = false
this._connected = false
this._queryable = true
// keep these on the object for legacy reasons
// for the time being. TODO: deprecate all this jazz
const cp = (this.connectionParameters = new ConnectionParameters(config))
if (config.nativeConnectionString) cp.nativeConnectionString = config.nativeConnectionString
this.user = cp.user
// "hiding" the password so it doesn't show up in stack traces
// or if the client is console.logged
Object.defineProperty(this, 'password', {
configurable: true,
enumerable: false,
writable: true,
value: cp.password,
})
this.database = cp.database
this.host = cp.host
this.port = cp.port
// a hash to hold named queries
this.namedQueries = {}
})
Client.Query = NativeQuery
util.inherits(Client, EventEmitter)
// Fail the active query (if any) and every query still waiting in the queue
// with `err`, delivering each error callback on a later tick.
Client.prototype._errorAllQueries = function (err) {
  const failLater = (query) => {
    process.nextTick(() => {
      query.native = this.native
      query.handleError(err)
    })
  }
  if (this._hasActiveQuery()) {
    failLater(this._activeQuery)
    this._activeQuery = null
  }
  for (const pending of this._queryQueue) {
    failLater(pending)
  }
  this._queryQueue.length = 0
}
// connect to the backend
// pass an optional callback to be called once connected
// or with an error if there was a connection error
Client.prototype._connect = function (cb) {
const self = this
// clients are single-use: a second connect attempt is always an error
if (this._connecting) {
process.nextTick(() => cb(new Error('Client has already been connected. You cannot reuse a client.')))
return
}
this._connecting = true
this.connectionParameters.getLibpqConnectionString(function (err, conString) {
// an explicit nativeConnectionString overrides the computed one
if (self.connectionParameters.nativeConnectionString) conString = self.connectionParameters.nativeConnectionString
if (err) return cb(err)
self.native.connect(conString, function (err) {
if (err) {
self.native.end()
return cb(err)
}
// set internal states to connected
self._connected = true
// handle connection errors from the native layer
self.native.on('error', function (err) {
self._queryable = false
self._errorAllQueries(err)
self.emit('error', err)
})
self.native.on('notification', function (msg) {
self.emit('notification', {
channel: msg.relname,
payload: msg.extra,
})
})
// signal we are connected now
self.emit('connect')
self._pulseQueryQueue(true)
cb()
})
})
}
// Connect to the server: callback style when `callback` is given, otherwise
// returns a promise resolved once the connection is established.
Client.prototype.connect = function (callback) {
  if (callback) {
    this._connect(callback)
    return
  }
  return new this._Promise((resolve, reject) => {
    this._connect((error) => {
      if (error) {
        return reject(error)
      }
      resolve()
    })
  })
}
// send a query to the server
// this method is highly overloaded to take
// 1) string query, optional array of parameters, optional function callback
// 2) object query with {
//      string query
//      optional array values,
//      optional function callback instead of as a separate parameter
//      optional string name to name & cache the query plan
//      optional string rowMode = 'array' for an array of results
//    }
// Returns a promise when no callback is supplied, otherwise undefined.
Client.prototype.query = function (config, values, callback) {
  let query
  let result
  let readTimeout
  let readTimeoutTimer
  let queryCallback
  if (config === null || config === undefined) {
    throw new TypeError('Client was passed a null or undefined query')
  } else if (typeof config.submit === 'function') {
    // caller passed a pre-built query object (anything with a submit() method);
    // a per-query timeout overrides the client-wide one
    readTimeout = config.query_timeout || this.connectionParameters.query_timeout
    result = query = config
    // accept query(new Query(...), (err, res) => { }) style
    if (typeof values === 'function') {
      config.callback = values
    }
  } else {
    // caller passed a string or a plain config object — wrap it in a NativeQuery
    readTimeout = config.query_timeout || this.connectionParameters.query_timeout
    query = new NativeQuery(config, values, callback)
    if (!query.callback) {
      // no callback supplied: surface the result as a promise instead
      let resolveOut, rejectOut
      result = new this._Promise((resolve, reject) => {
        resolveOut = resolve
        rejectOut = reject
      }).catch((err) => {
        // re-capture the stack so rejections point at this call site
        Error.captureStackTrace(err)
        throw err
      })
      query.callback = (err, res) => (err ? rejectOut(err) : resolveOut(res))
    }
  }
  if (readTimeout) {
    // stash the real callback; on timeout we error the query, drop it from the
    // queue, and neuter the callback so a late completion is silently ignored
    queryCallback = query.callback
    readTimeoutTimer = setTimeout(() => {
      const error = new Error('Query read timeout')
      process.nextTick(() => {
        query.handleError(error, this.connection)
      })
      queryCallback(error)
      // we already returned an error,
      // just do nothing if query completes
      query.callback = () => {}
      // Remove from queue
      const index = this._queryQueue.indexOf(query)
      if (index > -1) {
        this._queryQueue.splice(index, 1)
      }
      this._pulseQueryQueue()
    }, readTimeout)
    // wrap the original callback so a timely completion cancels the timer
    query.callback = (err, res) => {
      clearTimeout(readTimeoutTimer)
      queryCallback(err, res)
    }
  }
  // reject (asynchronously) if the client can no longer run queries
  if (!this._queryable) {
    query.native = this.native
    process.nextTick(() => {
      query.handleError(new Error('Client has encountered a connection error and is not queryable'))
    })
    return result
  }
  if (this._ending) {
    query.native = this.native
    process.nextTick(() => {
      query.handleError(new Error('Client was closed and is not queryable'))
    })
    return result
  }
  this._queryQueue.push(query)
  this._pulseQueryQueue()
  return result
}
// disconnect from the backend server
// With a callback, reports completion through it; without one, returns a
// promise that resolves once the connection has fully terminated.
Client.prototype.end = function (cb) {
  this._ending = true
  // if end() arrives before the connection completed, schedule another end()
  // for when it does (the native end below still runs immediately as well)
  if (!this._connected) {
    this.once('connect', this.end.bind(this, cb))
  }
  let result
  if (!cb) {
    // promise mode: synthesize a callback that settles the promise
    result = new this._Promise((resolve, reject) => {
      cb = (err) => (err ? reject(err) : resolve())
    })
  }
  this.native.end(() => {
    // every outstanding query fails once the connection is gone
    this._errorAllQueries(new Error('Connection terminated'))
    process.nextTick(() => {
      this.emit('end')
      if (cb) {
        cb()
      }
    })
  })
  return result
}
// True when a query is currently in flight (i.e. one exists and has not
// already finished in the 'error' or 'end' state). Falsy _activeQuery
// values pass straight through, matching short-circuit && semantics.
Client.prototype._hasActiveQuery = function () {
  const active = this._activeQuery
  if (!active) {
    return active
  }
  return active.state !== 'error' && active.state !== 'end'
}
// Advance the query queue: submit the next queued query if the client is
// connected and idle. Emits 'drain' when the queue empties, except on the
// very first pulse right after connecting (initialConnection === true).
Client.prototype._pulseQueryQueue = function (initialConnection) {
  // nothing to do until connected, or while a query is still in flight
  if (!this._connected || this._hasActiveQuery()) {
    return
  }
  const next = this._queryQueue.shift()
  if (!next) {
    if (!initialConnection) {
      this.emit('drain')
    }
    return
  }
  this._activeQuery = next
  next.submit(this)
  // keep pumping the queue once this query finishes
  next.once('_done', () => {
    this._pulseQueryQueue()
  })
}
// attempt to cancel an in-progress query
// If `query` is currently executing, ask the server to cancel it;
// if it is still waiting in the queue, just remove it from the queue.
Client.prototype.cancel = function (query) {
  if (this._activeQuery === query) {
    // fire-and-forget: errors from the native cancel request are ignored
    this.native.cancel(function () {})
  } else {
    // look the query up once instead of scanning the queue twice
    const index = this._queryQueue.indexOf(query)
    if (index !== -1) {
      this._queryQueue.splice(index, 1)
    }
  }
}
// no-op stubs kept for API parity with the pure-JS client, whose
// ref/unref control whether the socket keeps the event loop alive
Client.prototype.ref = function () {}
Client.prototype.unref = function () {}
// register a custom parser for the given type OID / format ('text' or 'binary');
// delegates to this client's type-parser registry
Client.prototype.setTypeParser = function (oid, format, parseFn) {
  return this._types.setTypeParser(oid, format, parseFn)
}
// look up the parser registered for the given type OID / format;
// delegates to this client's type-parser registry
Client.prototype.getTypeParser = function (oid, format) {
  return this._types.getTypeParser(oid, format)
}

2
node_modules/pg/lib/native/index.js generated vendored Normal file
View File

@ -0,0 +1,2 @@
'use strict'
module.exports = require('./client')

Some files were not shown because too many files have changed in this diff Show More