[upd] node

parent 5c5c2dbec7
commit 75224ac7a4
158 changed files with 14375 additions and 405 deletions

514  lib/node/node_modules/.package-lock.json  (generated, vendored)
File diff suppressed because it is too large.
18  lib/node/node_modules/node-abi/abi_registry.json  (generated, vendored)

@@ -98,8 +98,8 @@
      "2024-10-29",
      "2025-10-21"
    ],
    "future": true,
    "abi": "124"
    "future": false,
    "abi": "127"
  },
  {
    "abi": "70",

@@ -309,6 +309,20 @@
    "future": true,
    "lts": false,
    "runtime": "electron",
    "target": "31.0.0-alpha.1"
  },
  {
    "abi": "123",
    "future": false,
    "lts": false,
    "runtime": "electron",
    "target": "30.0.0-alpha.1"
  },
  {
    "abi": "125",
    "future": true,
    "lts": false,
    "runtime": "electron",
    "target": "31.0.0-beta.7"
  }
]
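The registry above is the data that `node-abi` consults when translating between runtime/target pairs and ABI numbers. A small sketch of the lookups it feeds (`getAbi`/`getTarget` are the package's documented API; the electron result is read off the entry added above):

```js
const nodeAbi = require('node-abi')

nodeAbi.getAbi('7.2.0', 'node')              // '51' — the package README's canonical example
nodeAbi.getAbi('30.0.0-alpha.1', 'electron') // '123', matching the registry entry added above
nodeAbi.getTarget('123', 'electron')         // back from an ABI number to the oldest matching target
```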
2  lib/node/node_modules/node-abi/package.json  (generated, vendored)

@@ -1,6 +1,6 @@
{
  "name": "node-abi",
  "version": "3.57.0",
  "version": "3.63.0",
  "description": "Get the Node ABI for a given target and runtime, and vice versa.",
  "main": "index.js",
  "scripts": {
21  lib/node/node_modules/pg-cloudflare/LICENSE  (generated, vendored, new file)

@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2010 - 2021 Brian Carlson

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
33  lib/node/node_modules/pg-cloudflare/README.md  (generated, vendored, new file)

@@ -0,0 +1,33 @@
# pg-cloudflare

A socket implementation that can run on Cloudflare Workers using native TCP connections.

## install

```
npm i --save-dev pg-cloudflare
```

### license

The MIT License (MIT)

Copyright (c) 2023 Brian M. Carlson

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
2  lib/node/node_modules/pg-cloudflare/dist/empty.d.ts  (generated, vendored, new file)

@@ -0,0 +1,2 @@
declare const _default: {};
export default _default;
4  lib/node/node_modules/pg-cloudflare/dist/empty.js  (generated, vendored, new file)

@@ -0,0 +1,4 @@
// This is an empty module that is served up when outside of a workerd environment
// See the `exports` field in package.json
export default {};
//# sourceMappingURL=empty.js.map
1  lib/node/node_modules/pg-cloudflare/dist/empty.js.map  (generated, vendored, new file)

@@ -0,0 +1 @@
{"version":3,"file":"empty.js","sourceRoot":"","sources":["../src/empty.ts"],"names":[],"mappings":"AAAA,kFAAkF;AAClF,0CAA0C;AAC1C,eAAe,EAAE,CAAA"}
31  lib/node/node_modules/pg-cloudflare/dist/index.d.ts  (generated, vendored, new file)

@@ -0,0 +1,31 @@
/// <reference types="node" />
/// <reference types="node" />
/// <reference types="node" />
import { TlsOptions } from 'cloudflare:sockets';
import { EventEmitter } from 'events';
/**
 * Wrapper around the Cloudflare built-in socket that can be used by the `Connection`.
 */
export declare class CloudflareSocket extends EventEmitter {
    readonly ssl: boolean;
    writable: boolean;
    destroyed: boolean;
    private _upgrading;
    private _upgraded;
    private _cfSocket;
    private _cfWriter;
    private _cfReader;
    constructor(ssl: boolean);
    setNoDelay(): this;
    setKeepAlive(): this;
    ref(): this;
    unref(): this;
    connect(port: number, host: string, connectListener?: (...args: unknown[]) => void): Promise<this | undefined>;
    _listen(): Promise<void>;
    _listenOnce(): Promise<void>;
    write(data: Uint8Array | string, encoding?: BufferEncoding, callback?: (...args: unknown[]) => void): true | void;
    end(data?: Buffer, encoding?: BufferEncoding, callback?: (...args: unknown[]) => void): this;
    destroy(reason: string): this;
    startTls(options: TlsOptions): void;
    _addClosedHandler(): void;
}
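Taken together, these declarations describe a drop-in stand-in for the parts of `net.Socket` that pg's `Connection` touches. A hedged usage sketch (the host, port, and payload are placeholders, not anything from this commit; it only runs inside workerd, where the real implementation is exported):

```js
import { CloudflareSocket } from 'pg-cloudflare'

export default {
  async fetch() {
    const sock = new CloudflareSocket(false) // pass true to set up a STARTTLS-capable socket
    sock.on('data', (chunk) => console.log('received', chunk.length, 'bytes'))
    sock.on('error', (err) => console.error('socket error', err))
    await sock.connect(5432, 'db.example.com') // placeholder host and port
    sock.write('ping', 'utf8', () => sock.end()) // flush, then close via end()
    return new Response('done')
  },
}
```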
146  lib/node/node_modules/pg-cloudflare/dist/index.js  (generated, vendored, new file)

@@ -0,0 +1,146 @@
import { EventEmitter } from 'events';
/**
 * Wrapper around the Cloudflare built-in socket that can be used by the `Connection`.
 */
export class CloudflareSocket extends EventEmitter {
    constructor(ssl) {
        super();
        this.ssl = ssl;
        this.writable = false;
        this.destroyed = false;
        this._upgrading = false;
        this._upgraded = false;
        this._cfSocket = null;
        this._cfWriter = null;
        this._cfReader = null;
    }
    setNoDelay() {
        return this;
    }
    setKeepAlive() {
        return this;
    }
    ref() {
        return this;
    }
    unref() {
        return this;
    }
    async connect(port, host, connectListener) {
        try {
            log('connecting');
            if (connectListener)
                this.once('connect', connectListener);
            const options = this.ssl ? { secureTransport: 'starttls' } : {};
            const { connect } = await import('cloudflare:sockets');
            this._cfSocket = connect(`${host}:${port}`, options);
            this._cfWriter = this._cfSocket.writable.getWriter();
            this._addClosedHandler();
            this._cfReader = this._cfSocket.readable.getReader();
            if (this.ssl) {
                this._listenOnce().catch((e) => this.emit('error', e));
            }
            else {
                this._listen().catch((e) => this.emit('error', e));
            }
            await this._cfWriter.ready;
            log('socket ready');
            this.writable = true;
            this.emit('connect');
            return this;
        }
        catch (e) {
            this.emit('error', e);
        }
    }
    async _listen() {
        while (true) {
            log('awaiting receive from CF socket');
            const { done, value } = await this._cfReader.read();
            log('CF socket received:', done, value);
            if (done) {
                log('done');
                break;
            }
            this.emit('data', Buffer.from(value));
        }
    }
    async _listenOnce() {
        log('awaiting first receive from CF socket');
        const { done, value } = await this._cfReader.read();
        log('First CF socket received:', done, value);
        this.emit('data', Buffer.from(value));
    }
    write(data, encoding = 'utf8', callback = () => { }) {
        if (data.length === 0)
            return callback();
        if (typeof data === 'string')
            data = Buffer.from(data, encoding);
        log('sending data direct:', data);
        this._cfWriter.write(data).then(() => {
            log('data sent');
            callback();
        }, (err) => {
            log('send error', err);
            callback(err);
        });
        return true;
    }
    end(data = Buffer.alloc(0), encoding = 'utf8', callback = () => { }) {
        log('ending CF socket');
        this.write(data, encoding, (err) => {
            this._cfSocket.close();
            if (callback)
                callback(err);
        });
        return this;
    }
    destroy(reason) {
        log('destroying CF socket', reason);
        this.destroyed = true;
        return this.end();
    }
    startTls(options) {
        if (this._upgraded) {
            // Don't try to upgrade again.
            this.emit('error', 'Cannot call `startTls()` more than once on a socket');
            return;
        }
        this._cfWriter.releaseLock();
        this._cfReader.releaseLock();
        this._upgrading = true;
        this._cfSocket = this._cfSocket.startTls(options);
        this._cfWriter = this._cfSocket.writable.getWriter();
        this._cfReader = this._cfSocket.readable.getReader();
        this._addClosedHandler();
        this._listen().catch((e) => this.emit('error', e));
    }
    _addClosedHandler() {
        this._cfSocket.closed.then(() => {
            if (!this._upgrading) {
                log('CF socket closed');
                this._cfSocket = null;
                this.emit('close');
            }
            else {
                this._upgrading = false;
                this._upgraded = true;
            }
        }).catch((e) => this.emit('error', e));
    }
}
const debug = false;
function dump(data) {
    if (data instanceof Uint8Array || data instanceof ArrayBuffer) {
        const hex = Buffer.from(data).toString('hex');
        const str = new TextDecoder().decode(data);
        return `\n>>> STR: "${str.replace(/\n/g, '\\n')}"\n>>> HEX: ${hex}\n`;
    }
    else {
        return data;
    }
}
function log(...args) {
    debug && console.log(...args.map(dump));
}
//# sourceMappingURL=index.js.map
1  lib/node/node_modules/pg-cloudflare/dist/index.js.map  (generated, vendored, new file)

@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,YAAY,EAAE,MAAM,QAAQ,CAAA;AAErC;;GAEG;AACH,MAAM,OAAO,gBAAiB,SAAQ,YAAY;IAUhD,YAAqB,GAAY;QAC/B,KAAK,EAAE,CAAA;QADY,QAAG,GAAH,GAAG,CAAS;QATjC,aAAQ,GAAG,KAAK,CAAA;QAChB,cAAS,GAAG,KAAK,CAAA;QAET,eAAU,GAAG,KAAK,CAAA;QAClB,cAAS,GAAG,KAAK,CAAA;QACjB,cAAS,GAAkB,IAAI,CAAA;QAC/B,cAAS,GAAuC,IAAI,CAAA;QACpD,cAAS,GAAuC,IAAI,CAAA;IAI5D,CAAC;IAED,UAAU;QACR,OAAO,IAAI,CAAA;IACb,CAAC;IACD,YAAY;QACV,OAAO,IAAI,CAAA;IACb,CAAC;IACD,GAAG;QACD,OAAO,IAAI,CAAA;IACb,CAAC;IACD,KAAK;QACH,OAAO,IAAI,CAAA;IACb,CAAC;IAED,KAAK,CAAC,OAAO,CAAC,IAAY,EAAE,IAAY,EAAE,eAA8C;QACtF,IAAI;YACF,GAAG,CAAC,YAAY,CAAC,CAAA;YACjB,IAAI,eAAe;gBAAE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,eAAe,CAAC,CAAA;YAE1D,MAAM,OAAO,GAAkB,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,eAAe,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAA;YAC9E,MAAM,EAAE,OAAO,EAAE,GAAG,MAAM,MAAM,CAAC,oBAAoB,CAAC,CAAA;YACtD,IAAI,CAAC,SAAS,GAAG,OAAO,CAAC,GAAG,IAAI,IAAI,IAAI,EAAE,EAAE,OAAO,CAAC,CAAA;YACpD,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,SAAS,EAAE,CAAA;YACpD,IAAI,CAAC,iBAAiB,EAAE,CAAA;YAExB,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,SAAS,EAAE,CAAA;YACpD,IAAI,IAAI,CAAC,GAAG,EAAE;gBACZ,IAAI,CAAC,WAAW,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,CAAA;aACvD;iBAAM;gBACL,IAAI,CAAC,OAAO,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,CAAA;aACnD;YAED,MAAM,IAAI,CAAC,SAAU,CAAC,KAAK,CAAA;YAC3B,GAAG,CAAC,cAAc,CAAC,CAAA;YACnB,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAA;YACpB,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,CAAA;YAEpB,OAAO,IAAI,CAAA;SACZ;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,CAAA;SACtB;IACH,CAAC;IAED,KAAK,CAAC,OAAO;QACX,OAAO,IAAI,EAAE;YACX,GAAG,CAAC,iCAAiC,CAAC,CAAA;YACtC,MAAM,EAAE,IAAI,EAAE,KAAK,EAAE,GAAG,MAAM,IAAI,CAAC,SAAU,CAAC,IAAI,EAAE,CAAA;YACpD,GAAG,CAAC,qBAAqB,EAAE,IAAI,EAAE,KAAK,CAAC,CAAA;YACvC,IAAI,IAAI,EAAE;gBACR,GAAG,CAAC,MAAM,CAAC,CAAA;gBACX,MAAK;aACN;YACD,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAA;SACtC;IACH,CAAC;IAED,KAAK,CAAC,WAAW;QACf,GAAG,CAAC,uCAAuC,CAAC,CAAA;QAC5C,MAAM,EAAE,IAAI,EAAE,KAAK,EAAE,GAAG,MAAM,IAAI,CAAC,SAAU,CAAC,IAAI,EAAE,CAAA;QACpD,GAAG,CAAC,2BAA2B,EAAE,IAAI,EAAE,KAAK,CAAC,CAAA;QAC7C,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAA;IACvC,CAAC;IAED,KAAK,CACH,IAAyB,EACzB,WAA2B,MAAM,EACjC,WAAyC,GAAG,EAAE,GAAE,CAAC;QAEjD,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC;YAAE,OAAO,QAAQ,EAAE,CAAA;QACxC,IAAI,OAAO,IAAI,KAAK,QAAQ;YAAE,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAA;QAEhE,GAAG,CAAC,sBAAsB,EAAE,IAAI,CAAC,CAAA;QACjC,IAAI,CAAC,SAAU,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,CAC9B,GAAG,EAAE;YACH,GAAG,CAAC,WAAW,CAAC,CAAA;YAChB,QAAQ,EAAE,CAAA;QACZ,CAAC,EACD,CAAC,GAAG,EAAE,EAAE;YACN,GAAG,CAAC,YAAY,EAAE,GAAG,CAAC,CAAA;YACtB,QAAQ,CAAC,GAAG,CAAC,CAAA;QACf,CAAC,CACF,CAAA;QACD,OAAO,IAAI,CAAA;IACb,CAAC;IAED,GAAG,CAAC,IAAI,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,WAA2B,MAAM,EAAE,WAAyC,GAAG,EAAE,GAAE,CAAC;QAC9G,GAAG,CAAC,kBAAkB,CAAC,CAAA;QACvB,IAAI,CAAC,KAAK,CAAC,IAAI,EAAE,QAAQ,EAAE,CAAC,GAAG,EAAE,EAAE;YACjC,IAAI,CAAC,SAAU,CAAC,KAAK,EAAE,CAAA;YACvB,IAAI,QAAQ;gBAAE,QAAQ,CAAC,GAAG,CAAC,CAAA;QAC7B,CAAC,CAAC,CAAA;QACF,OAAO,IAAI,CAAA;IACb,CAAC;IAED,OAAO,CAAC,MAAc;QACpB,GAAG,CAAC,sBAAsB,EAAE,MAAM,CAAC,CAAA;QACnC,IAAI,CAAC,SAAS,GAAG,IAAI,CAAA;QACrB,OAAO,IAAI,CAAC,GAAG,EAAE,CAAA;IACnB,CAAC;IAED,QAAQ,CAAC,OAAmB;QAC1B,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,8BAA8B;YAC9B,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,qDAAqD,CAAC,CAAA;YACzE,OAAM;SACP;QACD,IAAI,CAAC,SAAU,CAAC,WAAW,
EAAE,CAAA;QAC7B,IAAI,CAAC,SAAU,CAAC,WAAW,EAAE,CAAA;QAC7B,IAAI,CAAC,UAAU,GAAG,IAAI,CAAA;QACtB,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,SAAU,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAA;QAClD,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,SAAS,EAAE,CAAA;QACpD,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,SAAS,EAAE,CAAA;QACpD,IAAI,CAAC,iBAAiB,EAAE,CAAA;QACxB,IAAI,CAAC,OAAO,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,CAAA;IACpD,CAAC;IAED,iBAAiB;QACf,IAAI,CAAC,SAAU,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE;YAC/B,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE;gBACpB,GAAG,CAAC,kBAAkB,CAAC,CAAA;gBACvB,IAAI,CAAC,SAAS,GAAG,IAAI,CAAA;gBACrB,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;aACnB;iBAAM;gBACL,IAAI,CAAC,UAAU,GAAG,KAAK,CAAA;gBACvB,IAAI,CAAC,SAAS,GAAG,IAAI,CAAA;aACtB;QACH,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,CAAA;IACxC,CAAC;CACF;AAED,MAAM,KAAK,GAAG,KAAK,CAAA;AAEnB,SAAS,IAAI,CAAC,IAAa;IACzB,IAAI,IAAI,YAAY,UAAU,IAAI,IAAI,YAAY,WAAW,EAAE;QAC7D,MAAM,GAAG,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAA;QAC7C,MAAM,GAAG,GAAG,IAAI,WAAW,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;QAC1C,OAAO,eAAe,GAAG,CAAC,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,eAAe,GAAG,IAAI,CAAA;KACtE;SAAM;QACL,OAAO,IAAI,CAAA;KACZ;AACH,CAAC;AAED,SAAS,GAAG,CAAC,GAAG,IAAe;IAC7B,KAAK,IAAI,OAAO,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAA;AACzC,CAAC"}

32  lib/node/node_modules/pg-cloudflare/package.json  (generated, vendored, new file)

@@ -0,0 +1,32 @@
{
  "name": "pg-cloudflare",
  "version": "1.1.1",
  "description": "A socket implementation that can run on Cloudflare Workers using native TCP connections.",
  "main": "dist/empty.js",
  "types": "dist/index.d.ts",
  "license": "MIT",
  "devDependencies": {
    "ts-node": "^8.5.4",
    "typescript": "^4.0.3"
  },
  "exports": {
    "workerd": "./dist/index.js",
    "default": "./dist/empty.js"
  },
  "scripts": {
    "build": "tsc",
    "build:watch": "tsc --watch",
    "prepublish": "yarn build",
    "test": "echo e2e test in pg package"
  },
  "repository": {
    "type": "git",
    "url": "git://github.com/brianc/node-postgres.git",
    "directory": "packages/pg-cloudflare"
  },
  "files": [
    "/dist/*{js,ts,map}",
    "/src"
  ],
  "gitHead": "eaafac36dc8f4a13f1fecc9e3420d35559fd8e2b"
}
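The `exports` map is the whole trick: under the `workerd` condition (Cloudflare Workers) an import resolves to the real implementation, everywhere else to the empty stub shown earlier. A quick sketch of what a consumer observes; this is standard conditional-exports behavior, not an API of the package itself:

```js
// Same specifier, two resolutions, per the exports map above:
//   workerd         -> ./dist/index.js  (exports CloudflareSocket)
//   everywhere else -> ./dist/empty.js  (empty default export)
import * as mod from 'pg-cloudflare'

console.log('CloudflareSocket' in mod ? 'real implementation' : 'empty stub')
```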
3  lib/node/node_modules/pg-cloudflare/src/empty.ts  (generated, vendored, new file)

@@ -0,0 +1,3 @@
// This is an empty module that is served up when outside of a workerd environment
// See the `exports` field in package.json
export default {}
164  lib/node/node_modules/pg-cloudflare/src/index.ts  (generated, vendored, new file)

@@ -0,0 +1,164 @@
import { SocketOptions, Socket, TlsOptions } from 'cloudflare:sockets'
import { EventEmitter } from 'events'

/**
 * Wrapper around the Cloudflare built-in socket that can be used by the `Connection`.
 */
export class CloudflareSocket extends EventEmitter {
  writable = false
  destroyed = false

  private _upgrading = false
  private _upgraded = false
  private _cfSocket: Socket | null = null
  private _cfWriter: WritableStreamDefaultWriter | null = null
  private _cfReader: ReadableStreamDefaultReader | null = null

  constructor(readonly ssl: boolean) {
    super()
  }

  setNoDelay() {
    return this
  }
  setKeepAlive() {
    return this
  }
  ref() {
    return this
  }
  unref() {
    return this
  }

  async connect(port: number, host: string, connectListener?: (...args: unknown[]) => void) {
    try {
      log('connecting')
      if (connectListener) this.once('connect', connectListener)

      const options: SocketOptions = this.ssl ? { secureTransport: 'starttls' } : {}
      const { connect } = await import('cloudflare:sockets')
      this._cfSocket = connect(`${host}:${port}`, options)
      this._cfWriter = this._cfSocket.writable.getWriter()
      this._addClosedHandler()

      this._cfReader = this._cfSocket.readable.getReader()
      if (this.ssl) {
        this._listenOnce().catch((e) => this.emit('error', e))
      } else {
        this._listen().catch((e) => this.emit('error', e))
      }

      await this._cfWriter!.ready
      log('socket ready')
      this.writable = true
      this.emit('connect')

      return this
    } catch (e) {
      this.emit('error', e)
    }
  }

  async _listen() {
    while (true) {
      log('awaiting receive from CF socket')
      const { done, value } = await this._cfReader!.read()
      log('CF socket received:', done, value)
      if (done) {
        log('done')
        break
      }
      this.emit('data', Buffer.from(value))
    }
  }

  async _listenOnce() {
    log('awaiting first receive from CF socket')
    const { done, value } = await this._cfReader!.read()
    log('First CF socket received:', done, value)
    this.emit('data', Buffer.from(value))
  }

  write(
    data: Uint8Array | string,
    encoding: BufferEncoding = 'utf8',
    callback: (...args: unknown[]) => void = () => {}
  ) {
    if (data.length === 0) return callback()
    if (typeof data === 'string') data = Buffer.from(data, encoding)

    log('sending data direct:', data)
    this._cfWriter!.write(data).then(
      () => {
        log('data sent')
        callback()
      },
      (err) => {
        log('send error', err)
        callback(err)
      }
    )
    return true
  }

  end(data = Buffer.alloc(0), encoding: BufferEncoding = 'utf8', callback: (...args: unknown[]) => void = () => {}) {
    log('ending CF socket')
    this.write(data, encoding, (err) => {
      this._cfSocket!.close()
      if (callback) callback(err)
    })
    return this
  }

  destroy(reason: string) {
    log('destroying CF socket', reason)
    this.destroyed = true
    return this.end()
  }

  startTls(options: TlsOptions) {
    if (this._upgraded) {
      // Don't try to upgrade again.
      this.emit('error', 'Cannot call `startTls()` more than once on a socket')
      return
    }
    this._cfWriter!.releaseLock()
    this._cfReader!.releaseLock()
    this._upgrading = true
    this._cfSocket = this._cfSocket!.startTls(options)
    this._cfWriter = this._cfSocket.writable.getWriter()
    this._cfReader = this._cfSocket.readable.getReader()
    this._addClosedHandler()
    this._listen().catch((e) => this.emit('error', e))
  }

  _addClosedHandler() {
    this._cfSocket!.closed.then(() => {
      if (!this._upgrading) {
        log('CF socket closed')
        this._cfSocket = null
        this.emit('close')
      } else {
        this._upgrading = false
        this._upgraded = true
      }
    }).catch((e) => this.emit('error', e))
  }
}

const debug = false

function dump(data: unknown) {
  if (data instanceof Uint8Array || data instanceof ArrayBuffer) {
    const hex = Buffer.from(data).toString('hex')
    const str = new TextDecoder().decode(data)
    return `\n>>> STR: "${str.replace(/\n/g, '\\n')}"\n>>> HEX: ${hex}\n`
  } else {
    return data
  }
}

function log(...args: unknown[]) {
  debug && console.log(...args.map(dump))
}
25  lib/node/node_modules/pg-cloudflare/src/types.d.ts  (generated, vendored, new file)

@@ -0,0 +1,25 @@
declare module 'cloudflare:sockets' {
  export class Socket {
    public readonly readable: any
    public readonly writable: any
    public readonly closed: Promise<void>
    public close(): Promise<void>
    public startTls(options: TlsOptions): Socket
  }

  export type TlsOptions = {
    expectedServerHostname?: string
  }

  export type SocketAddress = {
    hostname: string
    port: number
  }

  export type SocketOptions = {
    secureTransport?: 'off' | 'on' | 'starttls'
    allowHalfOpen?: boolean
  }

  export function connect(address: string | SocketAddress, options?: SocketOptions): Socket
}
21  lib/node/node_modules/pg-connection-string/LICENSE  (generated, vendored, new file)

@@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2014 Iced Development

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
77  lib/node/node_modules/pg-connection-string/README.md  (generated, vendored, new file)

@@ -0,0 +1,77 @@
pg-connection-string
====================

[](https://nodei.co/npm/pg-connection-string/)

[](https://travis-ci.org/iceddev/pg-connection-string)
[](https://coveralls.io/github/iceddev/pg-connection-string?branch=master)

Functions for dealing with a PostgreSQL connection string

`parse` method taken from [node-postgres](https://github.com/brianc/node-postgres.git)
Copyright (c) 2010-2014 Brian Carlson (brian.m.carlson@gmail.com)
MIT License

## Usage

```js
var parse = require('pg-connection-string').parse;

var config = parse('postgres://someuser:somepassword@somehost:381/somedatabase')
```

The resulting config contains a subset of the following properties:

* `host` - Postgres server hostname or, for UNIX domain sockets, the socket filename
* `port` - port on which to connect
* `user` - User with which to authenticate to the server
* `password` - Corresponding password
* `database` - Database name within the server
* `client_encoding` - string encoding the client will use
* `ssl`, either a boolean or an object with properties
  * `rejectUnauthorized`
  * `cert`
  * `key`
  * `ca`
* any other query parameters (for example, `application_name`) are preserved intact.

## Connection Strings

The short summary of acceptable URLs is:

* `socket:<path>?<query>` - UNIX domain socket
* `postgres://<user>:<password>@<host>:<port>/<database>?<query>` - TCP connection

But see below for more details.

### UNIX Domain Sockets

When user and password are not given, the socket path follows `socket:`, as in `socket:/var/run/pgsql`.
This form can be shortened to just a path: `/var/run/pgsql`.

When user and password are given, they are included in the typical URL positions, with an empty `host`, as in `socket://user:pass@/var/run/pgsql`.

Query parameters follow a `?` character, including the following special query parameters:

* `db=<database>` - sets the database name (urlencoded)
* `encoding=<encoding>` - sets the `client_encoding` property

### TCP Connections

TCP connections to the Postgres server are indicated with `pg:` or `postgres:` schemes (in fact, any scheme but `socket:` is accepted).
If username and password are included, they should be urlencoded.
The database name, however, should *not* be urlencoded.

Query parameters follow a `?` character, including the following special query parameters:

* `host=<host>` - sets `host` property, overriding the URL's host
* `encoding=<encoding>` - sets the `client_encoding` property
* `ssl=1`, `ssl=true`, `ssl=0`, `ssl=false` - sets `ssl` to true or false, accordingly
* `sslmode=<sslmode>`
  * `sslmode=disable` - sets `ssl` to false
  * `sslmode=no-verify` - sets `ssl` to `{ rejectUnauthorized: false }`
  * `sslmode=prefer`, `sslmode=require`, `sslmode=verify-ca`, `sslmode=verify-full` - sets `ssl` to true
* `sslcert=<filename>` - reads data from the given file and includes the result as `ssl.cert`
* `sslkey=<filename>` - reads data from the given file and includes the result as `ssl.key`
* `sslrootcert=<filename>` - reads data from the given file and includes the result as `ssl.ca`

A bare relative URL, such as `salesdata`, will indicate a database name while leaving other properties empty.
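A worked example of the socket rules above (the expected values follow from the README's own description; the socket path is a placeholder):

```js
var parse = require('pg-connection-string').parse

var config = parse('socket://user:pass@/var/run/pgsql?db=mydb&encoding=utf8')
// config.host            === '/var/run/pgsql'
// config.user            === 'user'
// config.database        === 'mydb'
// config.client_encoding === 'utf8'
```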
15  lib/node/node_modules/pg-connection-string/index.d.ts  (generated, vendored, new file)

@@ -0,0 +1,15 @@
export function parse(connectionString: string): ConnectionOptions

export interface ConnectionOptions {
  host: string | null
  password?: string
  user?: string
  port?: string | null
  database: string | null | undefined
  client_encoding?: string
  ssl?: boolean | string

  application_name?: string
  fallback_application_name?: string
  options?: string
}
112  lib/node/node_modules/pg-connection-string/index.js  (generated, vendored, new file)

@@ -0,0 +1,112 @@
'use strict'

//Parse method copied from https://github.com/brianc/node-postgres
//Copyright (c) 2010-2014 Brian Carlson (brian.m.carlson@gmail.com)
//MIT License

//parses a connection string
function parse(str) {
  //unix socket
  if (str.charAt(0) === '/') {
    const config = str.split(' ')
    return { host: config[0], database: config[1] }
  }

  // Check for empty host in URL

  const config = {}
  let result
  let dummyHost = false
  if (/ |%[^a-f0-9]|%[a-f0-9][^a-f0-9]/i.test(str)) {
    // Ensure spaces are encoded as %20
    str = encodeURI(str).replace(/\%25(\d\d)/g, '%$1')
  }

  try {
    result = new URL(str, 'postgres://base')
  } catch (e) {
    // The URL is invalid so try again with a dummy host
    result = new URL(str.replace('@/', '@___DUMMY___/'), 'postgres://base')
    dummyHost = true
  }

  // We'd like to use Object.fromEntries() here but Node.js 10 does not support it
  for (const entry of result.searchParams.entries()) {
    config[entry[0]] = entry[1]
  }

  config.user = config.user || decodeURIComponent(result.username)
  config.password = config.password || decodeURIComponent(result.password)

  if (result.protocol == 'socket:') {
    config.host = decodeURI(result.pathname)
    config.database = result.searchParams.get('db')
    config.client_encoding = result.searchParams.get('encoding')
    return config
  }
  const hostname = dummyHost ? '' : result.hostname
  if (!config.host) {
    // Only set the host if there is no equivalent query param.
    config.host = decodeURIComponent(hostname)
  } else if (hostname && /^%2f/i.test(hostname)) {
    // Only prepend the hostname to the pathname if it is not a URL encoded Unix socket host.
    result.pathname = hostname + result.pathname
  }
  if (!config.port) {
    // Only set the port if there is no equivalent query param.
    config.port = result.port
  }

  const pathname = result.pathname.slice(1) || null
  config.database = pathname ? decodeURI(pathname) : null

  if (config.ssl === 'true' || config.ssl === '1') {
    config.ssl = true
  }

  if (config.ssl === '0') {
    config.ssl = false
  }

  if (config.sslcert || config.sslkey || config.sslrootcert || config.sslmode) {
    config.ssl = {}
  }

  // Only try to load fs if we expect to read from the disk
  const fs = config.sslcert || config.sslkey || config.sslrootcert ? require('fs') : null

  if (config.sslcert) {
    config.ssl.cert = fs.readFileSync(config.sslcert).toString()
  }

  if (config.sslkey) {
    config.ssl.key = fs.readFileSync(config.sslkey).toString()
  }

  if (config.sslrootcert) {
    config.ssl.ca = fs.readFileSync(config.sslrootcert).toString()
  }

  switch (config.sslmode) {
    case 'disable': {
      config.ssl = false
      break
    }
    case 'prefer':
    case 'require':
    case 'verify-ca':
    case 'verify-full': {
      break
    }
    case 'no-verify': {
      config.ssl.rejectUnauthorized = false
      break
    }
  }

  return config
}

module.exports = parse

parse.parse = parse
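The `sslmode` handling at the end is easiest to read through examples. A hedged sketch of the outputs (the host and file path are placeholders; note that `sslrootcert` really does read from disk during parse):

```js
var parse = require('pg-connection-string').parse

parse('postgres://u:p@dbhost:5432/appdb?sslmode=no-verify').ssl
// -> { rejectUnauthorized: false }

parse('postgres://u:p@dbhost:5432/appdb?sslmode=require&sslrootcert=/tmp/ca.pem').ssl
// -> { ca: '<contents of /tmp/ca.pem>' } — readFileSync runs while parsing
```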
40  lib/node/node_modules/pg-connection-string/package.json  (generated, vendored, new file)

@@ -0,0 +1,40 @@
{
  "name": "pg-connection-string",
  "version": "2.6.4",
  "description": "Functions for dealing with a PostgresSQL connection string",
  "main": "./index.js",
  "types": "./index.d.ts",
  "scripts": {
    "test": "istanbul cover _mocha && npm run check-coverage",
    "check-coverage": "istanbul check-coverage --statements 100 --branches 100 --lines 100 --functions 100",
    "coveralls": "cat ./coverage/lcov.info | ./node_modules/.bin/coveralls"
  },
  "repository": {
    "type": "git",
    "url": "git://github.com/brianc/node-postgres.git",
    "directory": "packages/pg-connection-string"
  },
  "keywords": [
    "pg",
    "connection",
    "string",
    "parse"
  ],
  "author": "Blaine Bublitz <blaine@iceddev.com> (http://iceddev.com/)",
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/brianc/node-postgres/issues"
  },
  "homepage": "https://github.com/brianc/node-postgres/tree/master/packages/pg-connection-string",
  "devDependencies": {
    "chai": "^4.1.1",
    "coveralls": "^3.0.4",
    "istanbul": "^0.4.5",
    "mocha": "^7.1.2"
  },
  "files": [
    "index.js",
    "index.d.ts"
  ],
  "gitHead": "a37a93bf7990220517a40cf16b8e72d4c3e6cef5"
}
13  lib/node/node_modules/pg-int8/LICENSE  (generated, vendored, new file)

@@ -0,0 +1,13 @@
Copyright © 2017, Charmander <~@charmander.me>

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED “AS IS” AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.
16  lib/node/node_modules/pg-int8/README.md  (generated, vendored, new file)

@@ -0,0 +1,16 @@
[![Build status][ci image]][ci]

64-bit big-endian signed integer-to-string conversion designed for [pg][].

```js
const readInt8 = require('pg-int8');

readInt8(Buffer.from([0, 1, 2, 3, 4, 5, 6, 7]))
// '283686952306183'
```


[pg]: https://github.com/brianc/node-postgres

[ci]: https://travis-ci.org/charmander/pg-int8
[ci image]: https://api.travis-ci.org/charmander/pg-int8.svg
100  lib/node/node_modules/pg-int8/index.js  (generated, vendored, new file)

@@ -0,0 +1,100 @@
'use strict';

// selected so (BASE - 1) * 0x100000000 + 0xffffffff is a safe integer
var BASE = 1000000;

function readInt8(buffer) {
    var high = buffer.readInt32BE(0);
    var low = buffer.readUInt32BE(4);
    var sign = '';

    if (high < 0) {
        high = ~high + (low === 0);
        low = (~low + 1) >>> 0;
        sign = '-';
    }

    var result = '';
    var carry;
    var t;
    var digits;
    var pad;
    var l;
    var i;

    {
        carry = high % BASE;
        high = high / BASE >>> 0;

        t = 0x100000000 * carry + low;
        low = t / BASE >>> 0;
        digits = '' + (t - BASE * low);

        if (low === 0 && high === 0) {
            return sign + digits + result;
        }

        pad = '';
        l = 6 - digits.length;

        for (i = 0; i < l; i++) {
            pad += '0';
        }

        result = pad + digits + result;
    }

    {
        carry = high % BASE;
        high = high / BASE >>> 0;

        t = 0x100000000 * carry + low;
        low = t / BASE >>> 0;
        digits = '' + (t - BASE * low);

        if (low === 0 && high === 0) {
            return sign + digits + result;
        }

        pad = '';
        l = 6 - digits.length;

        for (i = 0; i < l; i++) {
            pad += '0';
        }

        result = pad + digits + result;
    }

    {
        carry = high % BASE;
        high = high / BASE >>> 0;

        t = 0x100000000 * carry + low;
        low = t / BASE >>> 0;
        digits = '' + (t - BASE * low);

        if (low === 0 && high === 0) {
            return sign + digits + result;
        }

        pad = '';
        l = 6 - digits.length;

        for (i = 0; i < l; i++) {
            pad += '0';
        }

        result = pad + digits + result;
    }

    {
        carry = high % BASE;
        t = 0x100000000 * carry + low;
        digits = '' + t % BASE;

        return sign + digits + result;
    }
}

module.exports = readInt8;
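The three identical braced blocks are a manually unrolled loop: each pass peels up to six decimal digits (base 1,000,000) off the 64-bit magnitude held in `high`/`low`, and the early `return` fires as soon as the remainder reaches zero. On Node 12+ the result can be cross-checked against the built-in BigInt reader:

```js
const readInt8 = require('pg-int8');

const buf = Buffer.from([0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01]);
readInt8(buf);                    // '-9223372036854775807'
buf.readBigInt64BE(0).toString(); // same string, via BigInt
```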
24  lib/node/node_modules/pg-int8/package.json  (generated, vendored, new file)

@@ -0,0 +1,24 @@
{
  "name": "pg-int8",
  "version": "1.0.1",
  "description": "64-bit big-endian signed integer-to-string conversion",
  "bugs": "https://github.com/charmander/pg-int8/issues",
  "license": "ISC",
  "files": [
    "index.js"
  ],
  "repository": {
    "type": "git",
    "url": "https://github.com/charmander/pg-int8"
  },
  "scripts": {
    "test": "tap test"
  },
  "devDependencies": {
    "@charmander/eslint-config-base": "1.0.2",
    "tap": "10.7.3"
  },
  "engines": {
    "node": ">=4.0.0"
  }
}
21  lib/node/node_modules/pg-pool/LICENSE  (generated, vendored, new file)

@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2017 Brian M. Carlson

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
376  lib/node/node_modules/pg-pool/README.md  (generated, vendored, new file)

@@ -0,0 +1,376 @@
# pg-pool
[](https://travis-ci.org/brianc/node-pg-pool)

A connection pool for node-postgres

## install
```sh
npm i pg-pool pg
```

## use

### create

to use pg-pool you must first create an instance of a pool

```js
var Pool = require('pg-pool')

// by default the pool uses the same
// configuration as whatever `pg` version you have installed
var pool = new Pool()

// you can pass properties to the pool
// these properties are passed unchanged to both the node-postgres Client constructor
// and the node-pool (https://github.com/coopernurse/node-pool) constructor
// allowing you to fully configure the behavior of both
var pool2 = new Pool({
  database: 'postgres',
  user: 'brianc',
  password: 'secret!',
  port: 5432,
  ssl: true,
  max: 20, // set pool max size to 20
  idleTimeoutMillis: 1000, // close idle clients after 1 second
  connectionTimeoutMillis: 1000, // return an error after 1 second if connection could not be established
  maxUses: 7500, // close (and replace) a connection after it has been used 7500 times (see below for discussion)
})

// you can supply a custom client constructor
// if you want to use the native postgres client
var NativeClient = require('pg').native.Client
var nativePool = new Pool({ Client: NativeClient })

// you can even pool pg-native clients directly
var PgNativeClient = require('pg-native')
var pgNativePool = new Pool({ Client: PgNativeClient })
```

##### Note:
The Pool constructor does not support passing a Database URL as the parameter. To use pg-pool on heroku, for example, you need to parse the URL into a config object. Here is an example of how to parse a Database URL.

```js
const Pool = require('pg-pool');
const url = require('url')

const params = url.parse(process.env.DATABASE_URL);
const auth = params.auth.split(':');

const config = {
  user: auth[0],
  password: auth[1],
  host: params.hostname,
  port: params.port,
  database: params.pathname.split('/')[1],
  ssl: true
};

const pool = new Pool(config);

/*
  Transforms 'postgres://DBuser:secret@DBHost:#####/myDB' into
  config = {
    user: 'DBuser',
    password: 'secret',
    host: 'DBHost',
    port: '#####',
    database: 'myDB',
    ssl: true
  }
*/
```

### acquire clients with a promise

pg-pool supports a fully promise-based api for acquiring clients

```js
var pool = new Pool()
pool.connect().then(client => {
  client.query('select $1::text as name', ['pg-pool']).then(res => {
    client.release()
    console.log('hello from', res.rows[0].name)
  })
  .catch(e => {
    client.release()
    console.error('query error', e.message, e.stack)
  })
})
```

### plays nice with async/await

this ends up looking much nicer if you're using [co](https://github.com/tj/co) or async/await:

```js
// with async/await
(async () => {
  var pool = new Pool()
  var client = await pool.connect()
  try {
    var result = await client.query('select $1::text as name', ['brianc'])
    console.log('hello from', result.rows[0])
  } finally {
    client.release()
  }
})().catch(e => console.error(e.message, e.stack))

// with co
co(function * () {
  var client = yield pool.connect()
  try {
    var result = yield client.query('select $1::text as name', ['brianc'])
    console.log('hello from', result.rows[0])
  } finally {
    client.release()
  }
}).catch(e => console.error(e.message, e.stack))
```

### your new favorite helper method

because it's so common to just run a query and return the client to the pool afterward pg-pool has this built-in:

```js
var pool = new Pool()
var time = await pool.query('SELECT NOW()')
var name = await pool.query('select $1::text as name', ['brianc'])
console.log(name.rows[0].name, 'says hello at', time.rows[0].now)
```

you can also use a callback here if you'd like:

```js
var pool = new Pool()
pool.query('SELECT $1::text as name', ['brianc'], function (err, res) {
  console.log(res.rows[0].name) // brianc
})
```

__pro tip:__ unless you need to run a transaction (which requires a single client for multiple queries) or you
have some other edge case like [streaming rows](https://github.com/brianc/node-pg-query-stream) or using a [cursor](https://github.com/brianc/node-pg-cursor)
you should almost always just use `pool.query`. It's easy, it does the right thing :tm:, and won't ever forget to return
clients back to the pool after the query is done.

### drop-in backwards compatible

pg-pool still and will always support the traditional callback api for acquiring a client. This is the exact API node-postgres has shipped with for years:

```js
var pool = new Pool()
pool.connect((err, client, done) => {
  if (err) return done(err)

  client.query('SELECT $1::text as name', ['pg-pool'], (err, res) => {
    done()
    if (err) {
      return console.error('query error', err.message, err.stack)
    }
    console.log('hello from', res.rows[0].name)
  })
})
```

### shut it down

When you are finished with the pool, if all the clients are idle, the pool will close them after `config.idleTimeoutMillis` and your app
will shut down gracefully. If you don't want to wait for the timeout you can end the pool as follows:

```js
var pool = new Pool()
var client = await pool.connect()
console.log(await client.query('select now()'))
client.release()
await pool.end()
```

### a note on instances

The pool should be a __long-lived object__ in your application. Generally you'll want to instantiate one pool when your app starts up and use the same instance of the pool throughout the lifetime of your application. If you are frequently creating a new pool within your code you likely don't have your pool initialization code in the correct place. Example:

```js
// assume this is a file in your program at ./your-app/lib/db.js

// correct usage: create the pool and let it live
// 'globally' here, controlling access to it through exported methods
var pool = new pg.Pool()

// this is the right way to export the query method
module.exports.query = (text, values) => {
  console.log('query:', text, values)
  return pool.query(text, values)
}

// this would be the WRONG way to export the connect method
module.exports.connect = () => {
  // notice how we would be creating a pool instance here
  // every time we called 'connect' to get a new client?
  // that's a bad thing & results in creating an unbounded
  // number of pools & therefore connections
  var aPool = new pg.Pool()
  return aPool.connect()
}
```

### events

Every instance of a `Pool` is an event emitter. These instances emit the following events:

#### error

Emitted whenever an idle client in the pool encounters an error. This is common when your PostgreSQL server shuts down, reboots, or a network partition otherwise causes it to become unavailable while your pool has connected clients.

Example:

```js
const Pool = require('pg-pool')
const pool = new Pool()

// attach an error handler to the pool for when a connected, idle client
// receives an error by being disconnected, etc
pool.on('error', function(error, client) {
  // handle this in the same way you would treat process.on('uncaughtException')
  // it is supplied the error as well as the idle client which received the error
})
```

#### connect

Fired whenever the pool creates a __new__ `pg.Client` instance and successfully connects it to the backend.

Example:

```js
const Pool = require('pg-pool')
const pool = new Pool()

var count = 0

pool.on('connect', client => {
  client.count = count++
})

pool
  .connect()
  .then(client => {
    return client
      .query('SELECT $1::int AS "clientCount"', [client.count])
      .then(res => console.log(res.rows[0].clientCount)) // outputs 0
      .then(() => client)
  })
  .then(client => client.release())

```

#### acquire

Fired whenever a client is acquired from the pool.

Example:

This allows you to count the number of clients which have ever been acquired from the pool.

```js
var Pool = require('pg-pool')
var pool = new Pool()

var acquireCount = 0
pool.on('acquire', function (client) {
  acquireCount++
})

var connectCount = 0
pool.on('connect', function () {
  connectCount++
})

for (var i = 0; i < 200; i++) {
  pool.query('SELECT NOW()')
}

setTimeout(function () {
  console.log('connect count:', connectCount) // output: connect count: 10
  console.log('acquire count:', acquireCount) // output: acquire count: 200
}, 100)

```

### environment variables

pg-pool & node-postgres support some of the same environment variables as `psql` supports. The most common are:

```
PGDATABASE=my_db
PGUSER=username
PGPASSWORD="my awesome password"
PGPORT=5432
PGSSLMODE=require
```

Usually I will export these into my local environment via a `.env` file with environment settings or export them in `~/.bash_profile` or something similar. This way I get configurability which works with both the postgres suite of tools (`psql`, `pg_dump`, `pg_restore`) and node, I can vary the environment variables locally and in production, and it supports the concept of a [12-factor app](http://12factor.net/) out of the box.

## bring your own promise

In versions of node `<=0.12.x` there is no native promise implementation available globally. You can polyfill the promise globally like this:

```js
// first run `npm install promise-polyfill --save`
if (typeof Promise == 'undefined') {
  global.Promise = require('promise-polyfill')
}
```

You can use any other promise implementation you'd like. The pool also allows you to configure the promise implementation on a per-pool level:

```js
var bluebirdPool = new Pool({
  Promise: require('bluebird')
})
```

__please note:__ in node `<=0.12.x` the pool will throw if you do not provide a promise constructor in one of the two ways mentioned above. In node `>=4.0.0` the pool will use the native promise implementation by default; however, the two methods above still allow you to "bring your own."

## maxUses and read-replica autoscaling (e.g. AWS Aurora)

The maxUses config option can help an application instance rebalance load against a replica set that has been auto-scaled after the connection pool is already full of healthy connections.

The mechanism here is that a connection is considered "expended" after it has been acquired and released `maxUses` number of times. Depending on the load on your system, this means there will be an approximate time in which any given connection will live, thus creating a window for rebalancing.

Imagine a scenario where you have 10 app instances providing an API running against a replica cluster of 3 that are accessed via a round-robin DNS entry. Each instance runs a connection pool size of 20. With an ambient load of 50 requests per second, the connection pool will likely fill up in a few minutes with healthy connections.

If you have weekly bursts of traffic which peak at 1,000 requests per second, you might want to grow your replicas to 10 during this period. Without setting `maxUses`, the new replicas will not be adopted by the app servers without an intervention -- namely, restarting each in turn in order to build up new connection pools that are balanced against all the replicas. Adding additional app server instances will help to some extent because they will adopt all the replicas in an even way, but the initial app servers will continue to focus additional load on the original replicas.

This is where the `maxUses` configuration option comes into play. Setting `maxUses` to 7500 will ensure that over a period of 30 minutes or so the new replicas will be adopted as the pre-existing connections are closed and replaced with new ones, thus creating a window for eventual balance.

You'll want to test based on your own scenarios, but one way to make a first guess at `maxUses` is to identify an acceptable window for rebalancing and then solve for the value:

```
maxUses = rebalanceWindowSeconds * totalRequestsPerSecond / numAppInstances / poolSize
```

In the example above, assuming we acquire and release 1 connection per request and we are aiming for a 30 minute rebalancing window:

```
maxUses = rebalanceWindowSeconds * totalRequestsPerSecond / numAppInstances / poolSize
7200 = 1800 * 1000 / 10 / 25
```
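To make the arithmetic above concrete, a throwaway helper that solves the same formula (the function name is ours, not part of pg-pool's API):

```js
// Hypothetical helper: first guess at maxUses from the README's formula.
function estimateMaxUses(rebalanceWindowSeconds, totalRequestsPerSecond, numAppInstances, poolSize) {
  return Math.round((rebalanceWindowSeconds * totalRequestsPerSecond) / numAppInstances / poolSize)
}

estimateMaxUses(1800, 1000, 10, 25) // 7200, the worked example above
```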
## tests

To run tests clone the repo, `npm i` in the working dir, and then run `npm test`

## contributions

I love contributions. Please make sure they have tests, and submit a PR. If you're not sure if the issue is worth it or will be accepted it never hurts to open an issue to begin the conversation. If you're interested in keeping up with node-postgres related stuff, you can follow me on twitter at [@briancarlson](https://twitter.com/briancarlson) - I generally announce any noteworthy updates there.

## license

The MIT License (MIT)
Copyright (c) 2016 Brian M. Carlson

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
467
lib/node/node_modules/pg-pool/index.js
generated
vendored
Normal file
@@ -0,0 +1,467 @@
'use strict'
const EventEmitter = require('events').EventEmitter

const NOOP = function () {}

const removeWhere = (list, predicate) => {
  const i = list.findIndex(predicate)

  return i === -1 ? undefined : list.splice(i, 1)[0]
}

class IdleItem {
  constructor(client, idleListener, timeoutId) {
    this.client = client
    this.idleListener = idleListener
    this.timeoutId = timeoutId
  }
}

class PendingItem {
  constructor(callback) {
    this.callback = callback
  }
}

function throwOnDoubleRelease() {
  throw new Error('Release called on client which has already been released to the pool.')
}

function promisify(Promise, callback) {
  if (callback) {
    return { callback: callback, result: undefined }
  }
  let rej
  let res
  const cb = function (err, client) {
    err ? rej(err) : res(client)
  }
  const result = new Promise(function (resolve, reject) {
    res = resolve
    rej = reject
  }).catch((err) => {
    // replace the stack trace that leads to `TCP.onStreamRead` with one that leads back to the
    // application that created the query
    Error.captureStackTrace(err)
    throw err
  })
  return { callback: cb, result: result }
}

function makeIdleListener(pool, client) {
  return function idleListener(err) {
    err.client = client

    client.removeListener('error', idleListener)
    client.on('error', () => {
      pool.log('additional client error after disconnection due to error', err)
    })
    pool._remove(client)
    // TODO - document that once the pool emits an error
    // the client has already been closed & purged and is unusable
    pool.emit('error', err, client)
  }
}

class Pool extends EventEmitter {
  constructor(options, Client) {
    super()
    this.options = Object.assign({}, options)

    if (options != null && 'password' in options) {
      // "hiding" the password so it doesn't show up in stack traces
      // or if the client is console.logged
      Object.defineProperty(this.options, 'password', {
        configurable: true,
        enumerable: false,
        writable: true,
        value: options.password,
      })
    }
    if (options != null && options.ssl && options.ssl.key) {
      // "hiding" the ssl->key so it doesn't show up in stack traces
      // or if the client is console.logged
      Object.defineProperty(this.options.ssl, 'key', {
        enumerable: false,
      })
    }

    this.options.max = this.options.max || this.options.poolSize || 10
    this.options.maxUses = this.options.maxUses || Infinity
    this.options.allowExitOnIdle = this.options.allowExitOnIdle || false
    this.options.maxLifetimeSeconds = this.options.maxLifetimeSeconds || 0
    this.log = this.options.log || function () {}
    this.Client = this.options.Client || Client || require('pg').Client
    this.Promise = this.options.Promise || global.Promise

    if (typeof this.options.idleTimeoutMillis === 'undefined') {
      this.options.idleTimeoutMillis = 10000
    }

    this._clients = []
    this._idle = []
    this._expired = new WeakSet()
    this._pendingQueue = []
    this._endCallback = undefined
    this.ending = false
    this.ended = false
  }

  _isFull() {
    return this._clients.length >= this.options.max
  }

  _pulseQueue() {
    this.log('pulse queue')
    if (this.ended) {
      this.log('pulse queue ended')
      return
    }
    if (this.ending) {
      this.log('pulse queue on ending')
      if (this._idle.length) {
        this._idle.slice().map((item) => {
          this._remove(item.client)
        })
      }
      if (!this._clients.length) {
        this.ended = true
        this._endCallback()
      }
      return
    }

    // if we don't have any waiting, do nothing
    if (!this._pendingQueue.length) {
      this.log('no queued requests')
      return
    }
    // if we don't have any idle clients and we have no more room do nothing
    if (!this._idle.length && this._isFull()) {
      return
    }
    const pendingItem = this._pendingQueue.shift()
    if (this._idle.length) {
      const idleItem = this._idle.pop()
      clearTimeout(idleItem.timeoutId)
      const client = idleItem.client
      client.ref && client.ref()
      const idleListener = idleItem.idleListener

      return this._acquireClient(client, pendingItem, idleListener, false)
    }
    if (!this._isFull()) {
      return this.newClient(pendingItem)
    }
    throw new Error('unexpected condition')
  }

  _remove(client) {
    const removed = removeWhere(this._idle, (item) => item.client === client)

    if (removed !== undefined) {
      clearTimeout(removed.timeoutId)
    }

    this._clients = this._clients.filter((c) => c !== client)
    client.end()
    this.emit('remove', client)
  }

  connect(cb) {
    if (this.ending) {
      const err = new Error('Cannot use a pool after calling end on the pool')
      return cb ? cb(err) : this.Promise.reject(err)
    }

    const response = promisify(this.Promise, cb)
    const result = response.result

    // if we don't have to connect a new client, don't do so
    if (this._isFull() || this._idle.length) {
      // if we have idle clients schedule a pulse immediately
      if (this._idle.length) {
        process.nextTick(() => this._pulseQueue())
      }

      if (!this.options.connectionTimeoutMillis) {
        this._pendingQueue.push(new PendingItem(response.callback))
        return result
      }

      const queueCallback = (err, res, done) => {
        clearTimeout(tid)
        response.callback(err, res, done)
      }

      const pendingItem = new PendingItem(queueCallback)

      // set connection timeout on checking out an existing client
      const tid = setTimeout(() => {
        // remove the callback from pending waiters because
        // we're going to call it with a timeout error
        removeWhere(this._pendingQueue, (i) => i.callback === queueCallback)
        pendingItem.timedOut = true
        response.callback(new Error('timeout exceeded when trying to connect'))
      }, this.options.connectionTimeoutMillis)

      this._pendingQueue.push(pendingItem)
      return result
    }

    this.newClient(new PendingItem(response.callback))

    return result
  }

  newClient(pendingItem) {
    const client = new this.Client(this.options)
    this._clients.push(client)
    const idleListener = makeIdleListener(this, client)

    this.log('checking client timeout')

    // connection timeout logic
    let tid
    let timeoutHit = false
    if (this.options.connectionTimeoutMillis) {
      tid = setTimeout(() => {
        this.log('ending client due to timeout')
        timeoutHit = true
        // force kill the node driver, and let libpq do its teardown
        client.connection ? client.connection.stream.destroy() : client.end()
      }, this.options.connectionTimeoutMillis)
    }

    this.log('connecting new client')
    client.connect((err) => {
      if (tid) {
        clearTimeout(tid)
      }
      client.on('error', idleListener)
      if (err) {
        this.log('client failed to connect', err)
        // remove the dead client from our list of clients
        this._clients = this._clients.filter((c) => c !== client)
        if (timeoutHit) {
          err.message = 'Connection terminated due to connection timeout'
        }

        // this client won't be released, so move on immediately
        this._pulseQueue()

        if (!pendingItem.timedOut) {
          pendingItem.callback(err, undefined, NOOP)
        }
      } else {
        this.log('new client connected')

        if (this.options.maxLifetimeSeconds !== 0) {
          const maxLifetimeTimeout = setTimeout(() => {
            this.log('ending client due to expired lifetime')
            this._expired.add(client)
            const idleIndex = this._idle.findIndex((idleItem) => idleItem.client === client)
            if (idleIndex !== -1) {
              this._acquireClient(
                client,
                new PendingItem((err, client, clientRelease) => clientRelease()),
                idleListener,
                false
              )
            }
          }, this.options.maxLifetimeSeconds * 1000)

          maxLifetimeTimeout.unref()
          client.once('end', () => clearTimeout(maxLifetimeTimeout))
        }

        return this._acquireClient(client, pendingItem, idleListener, true)
      }
    })
  }

  // acquire a client for a pending work item
  _acquireClient(client, pendingItem, idleListener, isNew) {
    if (isNew) {
      this.emit('connect', client)
    }

    this.emit('acquire', client)

    client.release = this._releaseOnce(client, idleListener)

    client.removeListener('error', idleListener)

    if (!pendingItem.timedOut) {
      if (isNew && this.options.verify) {
        this.options.verify(client, (err) => {
          if (err) {
            client.release(err)
            return pendingItem.callback(err, undefined, NOOP)
          }

          pendingItem.callback(undefined, client, client.release)
        })
      } else {
        pendingItem.callback(undefined, client, client.release)
      }
    } else {
      if (isNew && this.options.verify) {
        this.options.verify(client, client.release)
      } else {
        client.release()
      }
    }
  }

  // returns a function that wraps _release and throws if called more than once
  _releaseOnce(client, idleListener) {
    let released = false

    return (err) => {
      if (released) {
        throwOnDoubleRelease()
      }

      released = true
      this._release(client, idleListener, err)
    }
  }

  // release a client back to the pool, include an error
  // to remove it from the pool
  _release(client, idleListener, err) {
    client.on('error', idleListener)

    client._poolUseCount = (client._poolUseCount || 0) + 1

    this.emit('release', err, client)

    // TODO(bmc): expose a proper, public interface _queryable and _ending
    if (err || this.ending || !client._queryable || client._ending || client._poolUseCount >= this.options.maxUses) {
      if (client._poolUseCount >= this.options.maxUses) {
        this.log('remove expended client')
      }
      this._remove(client)
      this._pulseQueue()
      return
    }

    const isExpired = this._expired.has(client)
    if (isExpired) {
      this.log('remove expired client')
      this._expired.delete(client)
      this._remove(client)
      this._pulseQueue()
      return
    }

    // idle timeout
    let tid
    if (this.options.idleTimeoutMillis) {
      tid = setTimeout(() => {
        this.log('remove idle client')
        this._remove(client)
      }, this.options.idleTimeoutMillis)

      if (this.options.allowExitOnIdle) {
        // allow Node to exit if this is all that's left
        tid.unref()
      }
    }

    if (this.options.allowExitOnIdle) {
      client.unref()
    }

    this._idle.push(new IdleItem(client, idleListener, tid))
    this._pulseQueue()
  }

  query(text, values, cb) {
    // guard clause against passing a function as the first parameter
    if (typeof text === 'function') {
      const response = promisify(this.Promise, text)
      setImmediate(function () {
        return response.callback(new Error('Passing a function as the first parameter to pool.query is not supported'))
      })
      return response.result
    }

    // allow plain text query without values
    if (typeof values === 'function') {
      cb = values
      values = undefined
    }
    const response = promisify(this.Promise, cb)
    cb = response.callback

    this.connect((err, client) => {
      if (err) {
        return cb(err)
      }

      let clientReleased = false
      const onError = (err) => {
        if (clientReleased) {
          return
        }
        clientReleased = true
        client.release(err)
        cb(err)
      }

      client.once('error', onError)
      this.log('dispatching query')
      try {
        client.query(text, values, (err, res) => {
          this.log('query dispatched')
          client.removeListener('error', onError)
          if (clientReleased) {
            return
          }
          clientReleased = true
          client.release(err)
          if (err) {
            return cb(err)
          }
          return cb(undefined, res)
        })
      } catch (err) {
        client.release(err)
        return cb(err)
      }
    })
    return response.result
  }

  end(cb) {
    this.log('ending')
    if (this.ending) {
      const err = new Error('Called end on pool more than once')
      return cb ? cb(err) : this.Promise.reject(err)
    }
    this.ending = true
    const promised = promisify(this.Promise, cb)
    this._endCallback = promised.callback
    this._pulseQueue()
    return promised.result
  }

  get waitingCount() {
    return this._pendingQueue.length
  }

  get idleCount() {
    return this._idle.length
  }

  get expiredCount() {
    return this._clients.reduce((acc, client) => acc + (this._expired.has(client) ? 1 : 0), 0)
  }

  get totalCount() {
    return this._clients.length
  }
}
module.exports = Pool
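For orientation, a short usage sketch of the dual callback/promise API implemented above (both forms route through `promisify`; connection parameters are assumed to come from the standard `PG*` environment variables):

```
const Pool = require('pg-pool')

const pool = new Pool({ max: 10, idleTimeoutMillis: 10000 })

// Promise form: with no callback, promisify() builds and returns a promise.
pool
  .query('SELECT $1::text AS name', ['brianc'])
  .then((res) => console.log(res.rows[0].name))

// Callback form: the same methods accept a Node-style callback instead.
pool.connect((err, client, release) => {
  if (err) return console.error(err)
  client.query('SELECT NOW()', (err, res) => {
    release() // return the client to the pool (counts toward maxUses)
    if (err) return console.error(err)
    console.log(res.rows[0])
  })
})
```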
41
lib/node/node_modules/pg-pool/package.json
generated
vendored
Normal file
@@ -0,0 +1,41 @@
{
  "name": "pg-pool",
  "version": "3.6.2",
  "description": "Connection pool for node-postgres",
  "main": "index.js",
  "directories": {
    "test": "test"
  },
  "scripts": {
    "test": " node_modules/.bin/mocha"
  },
  "repository": {
    "type": "git",
    "url": "git://github.com/brianc/node-postgres.git",
    "directory": "packages/pg-pool"
  },
  "keywords": [
    "pg",
    "postgres",
    "pool",
    "database"
  ],
  "author": "Brian M. Carlson",
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/brianc/node-pg-pool/issues"
  },
  "homepage": "https://github.com/brianc/node-pg-pool#readme",
  "devDependencies": {
    "bluebird": "3.4.1",
    "co": "4.6.0",
    "expect.js": "0.3.1",
    "lodash": "^4.17.11",
    "mocha": "^7.1.2",
    "pg-cursor": "^1.3.0"
  },
  "peerDependencies": {
    "pg": ">=8.0"
  },
  "gitHead": "b03c071d2d15af259e1e008e9628191c865e58fa"
}
42
lib/node/node_modules/pg-pool/test/bring-your-own-promise.js
generated
vendored
Normal file
@@ -0,0 +1,42 @@
'use strict'
const co = require('co')
const expect = require('expect.js')

const describe = require('mocha').describe
const it = require('mocha').it
const BluebirdPromise = require('bluebird')

const Pool = require('../')

const checkType = (promise) => {
  expect(promise).to.be.a(BluebirdPromise)
  return promise.catch((e) => undefined)
}

describe('Bring your own promise', function () {
  it(
    'uses supplied promise for operations',
    co.wrap(function* () {
      const pool = new Pool({ Promise: BluebirdPromise })
      const client1 = yield checkType(pool.connect())
      client1.release()
      yield checkType(pool.query('SELECT NOW()'))
      const client2 = yield checkType(pool.connect())
      // TODO - make sure pg supports BYOP as well
      client2.release()
      yield checkType(pool.end())
    })
  )

  it(
    'uses promises in errors',
    co.wrap(function* () {
      const pool = new Pool({ Promise: BluebirdPromise, port: 48484 })
      yield checkType(pool.connect())
      yield checkType(pool.end())
      yield checkType(pool.connect())
      yield checkType(pool.query())
      yield checkType(pool.end())
    })
  )
})
29
lib/node/node_modules/pg-pool/test/connection-strings.js
generated
vendored
Normal file
@@ -0,0 +1,29 @@
const expect = require('expect.js')
const describe = require('mocha').describe
const it = require('mocha').it
const Pool = require('../')

describe('Connection strings', function () {
  it('pool delegates connectionString property to client', function (done) {
    const connectionString = 'postgres://foo:bar@baz:1234/xur'

    const pool = new Pool({
      // use a fake client so we can check we're passed the connectionString
      Client: function (args) {
        expect(args.connectionString).to.equal(connectionString)
        return {
          connect: function (cb) {
            cb(new Error('testing'))
          },
          on: function () {},
        }
      },
      connectionString: connectionString,
    })

    pool.connect(function (err, client) {
      expect(err).to.not.be(undefined)
      done()
    })
  })
})
229
lib/node/node_modules/pg-pool/test/connection-timeout.js
generated
vendored
Normal file
@@ -0,0 +1,229 @@
'use strict'
const net = require('net')
const co = require('co')
const expect = require('expect.js')

const describe = require('mocha').describe
const it = require('mocha').it
const before = require('mocha').before
const after = require('mocha').after

const Pool = require('../')

describe('connection timeout', () => {
  const connectionFailure = new Error('Temporary connection failure')

  before((done) => {
    this.server = net.createServer((socket) => {
      socket.on('data', () => {
        // discard any buffered data or the server won't terminate
      })
    })

    this.server.listen(() => {
      this.port = this.server.address().port
      done()
    })
  })

  after((done) => {
    this.server.close(done)
  })

  it('should callback with an error if timeout is passed', (done) => {
    const pool = new Pool({ connectionTimeoutMillis: 10, port: this.port, host: 'localhost' })
    pool.connect((err, client, release) => {
      expect(err).to.be.an(Error)
      expect(err.message).to.contain('timeout')
      expect(client).to.equal(undefined)
      expect(pool.idleCount).to.equal(0)
      done()
    })
  })

  it('should reject promise with an error if timeout is passed', (done) => {
    const pool = new Pool({ connectionTimeoutMillis: 10, port: this.port, host: 'localhost' })
    pool.connect().catch((err) => {
      expect(err).to.be.an(Error)
      expect(err.message).to.contain('timeout')
      expect(pool.idleCount).to.equal(0)
      done()
    })
  })

  it(
    'should handle multiple timeouts',
    co.wrap(
      function* () {
        const errors = []
        const pool = new Pool({ connectionTimeoutMillis: 1, port: this.port, host: 'localhost' })
        for (var i = 0; i < 15; i++) {
          try {
            yield pool.connect()
          } catch (e) {
            errors.push(e)
          }
        }
        expect(errors).to.have.length(15)
      }.bind(this)
    )
  )

  it('should timeout on checkout of used connection', (done) => {
    const pool = new Pool({ connectionTimeoutMillis: 100, max: 1 })
    pool.connect((err, client, release) => {
      expect(err).to.be(undefined)
      expect(client).to.not.be(undefined)
      pool.connect((err, client) => {
        expect(err).to.be.an(Error)
        expect(client).to.be(undefined)
        release()
        pool.end(done)
      })
    })
  })

  it('should not break further pending checkouts on a timeout', (done) => {
    const pool = new Pool({ connectionTimeoutMillis: 200, max: 1 })
    pool.connect((err, client, releaseOuter) => {
      expect(err).to.be(undefined)

      pool.connect((err, client) => {
        expect(err).to.be.an(Error)
        expect(client).to.be(undefined)
        releaseOuter()
      })

      setTimeout(() => {
        pool.connect((err, client, releaseInner) => {
          expect(err).to.be(undefined)
          expect(client).to.not.be(undefined)
          releaseInner()
          pool.end(done)
        })
      }, 100)
    })
  })

  it('should timeout on query if all clients are busy', (done) => {
    const pool = new Pool({ connectionTimeoutMillis: 100, max: 1 })
    pool.connect((err, client, release) => {
      expect(err).to.be(undefined)
      expect(client).to.not.be(undefined)
      pool.query('select now()', (err, result) => {
        expect(err).to.be.an(Error)
        expect(result).to.be(undefined)
        release()
        pool.end(done)
      })
    })
  })

  it('should recover from timeout errors', (done) => {
    const pool = new Pool({ connectionTimeoutMillis: 100, max: 1 })
    pool.connect((err, client, release) => {
      expect(err).to.be(undefined)
      expect(client).to.not.be(undefined)
      pool.query('select now()', (err, result) => {
        expect(err).to.be.an(Error)
        expect(result).to.be(undefined)
        release()
        pool.query('select $1::text as name', ['brianc'], (err, res) => {
          expect(err).to.be(undefined)
          expect(res.rows).to.have.length(1)
          pool.end(done)
        })
      })
    })
  })

  it('continues processing after a connection failure', (done) => {
    const Client = require('pg').Client
    const orgConnect = Client.prototype.connect
    let called = false

    Client.prototype.connect = function (cb) {
      // Simulate a failure on first call
      if (!called) {
        called = true

        return setTimeout(() => {
          cb(connectionFailure)
        }, 100)
      }
      // And pass-through the second call
      orgConnect.call(this, cb)
    }

    const pool = new Pool({
      Client: Client,
      connectionTimeoutMillis: 1000,
      max: 1,
    })

    pool.connect((err, client, release) => {
      expect(err).to.be(connectionFailure)

      pool.query('select $1::text as name', ['brianc'], (err, res) => {
        expect(err).to.be(undefined)
        expect(res.rows).to.have.length(1)
        pool.end(done)
      })
    })
  })

  it('releases newly connected clients if the queued already timed out', (done) => {
    const Client = require('pg').Client

    const orgConnect = Client.prototype.connect

    let connection = 0

    Client.prototype.connect = function (cb) {
      // Simulate a failure on first call
      if (connection === 0) {
        connection++

        return setTimeout(() => {
          cb(connectionFailure)
        }, 300)
      }

      // And second connect taking > connection timeout
      if (connection === 1) {
        connection++

        return setTimeout(() => {
          orgConnect.call(this, cb)
        }, 1000)
      }

      orgConnect.call(this, cb)
    }

    const pool = new Pool({
      Client: Client,
      connectionTimeoutMillis: 1000,
      max: 1,
    })

    // Direct connect
    pool.connect((err, client, release) => {
      expect(err).to.be(connectionFailure)
    })

    // Queued
    let called = 0
    pool.connect((err, client, release) => {
      // Verify the callback is only called once
      expect(called++).to.be(0)
      expect(err).to.be.an(Error)

      pool.query('select $1::text as name', ['brianc'], (err, res) => {
        expect(err).to.be(undefined)
        expect(res.rows).to.have.length(1)
        pool.end(done)
      })
    })
  })
})
40
lib/node/node_modules/pg-pool/test/ending.js
generated
vendored
Normal file
@@ -0,0 +1,40 @@
'use strict'
const co = require('co')
const expect = require('expect.js')

const describe = require('mocha').describe
const it = require('mocha').it

const Pool = require('../')

describe('pool ending', () => {
  it('ends without being used', (done) => {
    const pool = new Pool()
    pool.end(done)
  })

  it('ends with a promise', () => {
    return new Pool().end()
  })

  it(
    'ends with clients',
    co.wrap(function* () {
      const pool = new Pool()
      const res = yield pool.query('SELECT $1::text as name', ['brianc'])
      expect(res.rows[0].name).to.equal('brianc')
      return pool.end()
    })
  )

  it(
    'allows client to finish',
    co.wrap(function* () {
      const pool = new Pool()
      const query = pool.query('SELECT $1::text as name', ['brianc'])
      yield pool.end()
      const res = yield query
      expect(res.rows[0].name).to.equal('brianc')
    })
  )
})
260
lib/node/node_modules/pg-pool/test/error-handling.js
generated
vendored
Normal file
@@ -0,0 +1,260 @@
'use strict'
const net = require('net')
const co = require('co')
const expect = require('expect.js')

const describe = require('mocha').describe
const it = require('mocha').it

const Pool = require('../')

describe('pool error handling', function () {
  it('Should complete these queries without dying', function (done) {
    const pool = new Pool()
    let errors = 0
    let shouldGet = 0
    function runErrorQuery() {
      shouldGet++
      return new Promise(function (resolve, reject) {
        pool
          .query("SELECT 'asd'+1 ")
          .then(function (res) {
            reject(res) // this should always error
          })
          .catch(function (err) {
            errors++
            resolve(err)
          })
      })
    }
    const ps = []
    for (let i = 0; i < 5; i++) {
      ps.push(runErrorQuery())
    }
    Promise.all(ps).then(function () {
      expect(shouldGet).to.eql(errors)
      pool.end(done)
    })
  })

  it('Catches errors in client.query', async function () {
    let caught = false
    const pool = new Pool()
    try {
      await pool.query(null)
    } catch (e) {
      caught = true
    }
    pool.end()
    expect(caught).to.be(true)
  })

  describe('calling release more than once', () => {
    it(
      'should throw each time',
      co.wrap(function* () {
        const pool = new Pool()
        const client = yield pool.connect()
        client.release()
        expect(() => client.release()).to.throwError()
        expect(() => client.release()).to.throwError()
        return yield pool.end()
      })
    )

    it('should throw each time with callbacks', function (done) {
      const pool = new Pool()

      pool.connect(function (err, client, clientDone) {
        expect(err).not.to.be.an(Error)
        clientDone()

        expect(() => clientDone()).to.throwError()
        expect(() => clientDone()).to.throwError()

        pool.end(done)
      })
    })
  })

  describe('using an ended pool', () => {
    it('rejects all additional promises', (done) => {
      const pool = new Pool()
      const promises = []
      pool.end().then(() => {
        const squash = (promise) => promise.catch((e) => 'okay!')
        promises.push(squash(pool.connect()))
        promises.push(squash(pool.query('SELECT NOW()')))
        promises.push(squash(pool.end()))
        Promise.all(promises).then((res) => {
          expect(res).to.eql(['okay!', 'okay!', 'okay!'])
          done()
        })
      })
    })

    it('returns an error on all additional callbacks', (done) => {
      const pool = new Pool()
      pool.end(() => {
        pool.query('SELECT *', (err) => {
          expect(err).to.be.an(Error)
          pool.connect((err) => {
            expect(err).to.be.an(Error)
            pool.end((err) => {
              expect(err).to.be.an(Error)
              done()
            })
          })
        })
      })
    })
  })

  describe('error from idle client', () => {
    it(
      'removes client from pool',
      co.wrap(function* () {
        const pool = new Pool()
        const client = yield pool.connect()
        expect(pool.totalCount).to.equal(1)
        expect(pool.waitingCount).to.equal(0)
        expect(pool.idleCount).to.equal(0)
        client.release()
        yield new Promise((resolve, reject) => {
          process.nextTick(() => {
            let poolError
            pool.once('error', (err) => {
              poolError = err
            })

            let clientError
            client.once('error', (err) => {
              clientError = err
            })

            client.emit('error', new Error('expected'))

            expect(clientError.message).to.equal('expected')
            expect(poolError.message).to.equal('expected')
            expect(pool.idleCount).to.equal(0)
            expect(pool.totalCount).to.equal(0)
            pool.end().then(resolve, reject)
          })
        })
      })
    )
  })

  describe('error from in-use client', () => {
    it(
      'keeps the client in the pool',
      co.wrap(function* () {
        const pool = new Pool()
        const client = yield pool.connect()
        expect(pool.totalCount).to.equal(1)
        expect(pool.waitingCount).to.equal(0)
        expect(pool.idleCount).to.equal(0)

        yield new Promise((resolve, reject) => {
          process.nextTick(() => {
            let poolError
            pool.once('error', (err) => {
              poolError = err
            })

            let clientError
            client.once('error', (err) => {
              clientError = err
            })

            client.emit('error', new Error('expected'))

            expect(clientError.message).to.equal('expected')
            expect(poolError).not.to.be.ok()
            expect(pool.idleCount).to.equal(0)
            expect(pool.totalCount).to.equal(1)
            client.release()
            pool.end().then(resolve, reject)
          })
        })
      })
    )
  })

  describe('passing a function to pool.query', () => {
    it('calls back with error', (done) => {
      const pool = new Pool()
      console.log('passing fn to query')
      pool.query((err) => {
        expect(err).to.be.an(Error)
        pool.end(done)
      })
    })
  })

  describe('pool with lots of errors', () => {
    it(
      'continues to work and provide new clients',
      co.wrap(function* () {
        const pool = new Pool({ max: 1 })
        const errors = []
        for (var i = 0; i < 20; i++) {
          try {
            yield pool.query('invalid sql')
          } catch (err) {
            errors.push(err)
          }
        }
        expect(errors).to.have.length(20)
        expect(pool.idleCount).to.equal(0)
        expect(pool.query).to.be.a(Function)
        const res = yield pool.query('SELECT $1::text as name', ['brianc'])
        expect(res.rows).to.have.length(1)
        expect(res.rows[0].name).to.equal('brianc')
        return pool.end()
      })
    )
  })

  it('should continue with queued items after a connection failure', (done) => {
    const closeServer = net
      .createServer((socket) => {
        socket.destroy()
      })
      .unref()

    closeServer.listen(() => {
      const pool = new Pool({ max: 1, port: closeServer.address().port, host: 'localhost' })
      pool.connect((err) => {
        expect(err).to.be.an(Error)
        if (err.code) {
          expect(err.code).to.be('ECONNRESET')
        }
      })
      pool.connect((err) => {
        expect(err).to.be.an(Error)
        if (err.code) {
          expect(err.code).to.be('ECONNRESET')
        }
        closeServer.close(() => {
          pool.end(done)
        })
      })
    })
  })

  it('handles post-checkout client failures in pool.query', (done) => {
    const pool = new Pool({ max: 1 })
    pool.on('error', () => {
      // We double close the connection in this test, prevent exception caused by that
    })
    pool.query('SELECT pg_sleep(5)', [], (err) => {
      expect(err).to.be.an(Error)
      done()
    })

    setTimeout(() => {
      pool._clients[0].end()
    }, 1000)
  })
})
124
lib/node/node_modules/pg-pool/test/events.js
generated
vendored
Normal file
@@ -0,0 +1,124 @@
'use strict'

const expect = require('expect.js')
const EventEmitter = require('events').EventEmitter
const describe = require('mocha').describe
const it = require('mocha').it
const Pool = require('../')

describe('events', function () {
  it('emits connect before callback', function (done) {
    const pool = new Pool()
    let emittedClient = false
    pool.on('connect', function (client) {
      emittedClient = client
    })

    pool.connect(function (err, client, release) {
      if (err) return done(err)
      release()
      pool.end()
      expect(client).to.be(emittedClient)
      done()
    })
  })

  it('emits "connect" only with a successful connection', function () {
    const pool = new Pool({
      // This client will always fail to connect
      Client: mockClient({
        connect: function (cb) {
          process.nextTick(() => {
            cb(new Error('bad news'))
          })
        },
      }),
    })
    pool.on('connect', function () {
      throw new Error('should never get here')
    })
    return pool.connect().catch((e) => expect(e.message).to.equal('bad news'))
  })

  it('emits acquire every time a client is acquired', function (done) {
    const pool = new Pool()
    let acquireCount = 0
    pool.on('acquire', function (client) {
      expect(client).to.be.ok()
      acquireCount++
    })
    for (let i = 0; i < 10; i++) {
      pool.connect(function (err, client, release) {
        if (err) return done(err)
        release()
      })
      pool.query('SELECT now()')
    }
    setTimeout(function () {
      expect(acquireCount).to.be(20)
      pool.end(done)
    }, 100)
  })

  it('emits release every time a client is released', function (done) {
    const pool = new Pool()
    let releaseCount = 0
    pool.on('release', function (err, client) {
      expect(err instanceof Error).not.to.be(true)
      expect(client).to.be.ok()
      releaseCount++
    })
    const promises = []
    for (let i = 0; i < 10; i++) {
      pool.connect(function (err, client, release) {
        if (err) return done(err)
        release()
      })
      promises.push(pool.query('SELECT now()'))
    }
    Promise.all(promises).then(() => {
      pool.end(() => {
        expect(releaseCount).to.be(20)
        done()
      })
    })
  })

  it('emits release with an error if client is released due to an error', function (done) {
    const pool = new Pool()
    pool.connect(function (err, client, release) {
      expect(err).to.equal(undefined)
      const releaseError = new Error('problem')
      pool.once('release', function (err, errClient) {
        expect(err).to.equal(releaseError)
        expect(errClient).to.equal(client)
        pool.end(done)
      })
      release(releaseError)
    })
  })

  it('emits error and client if an idle client in the pool hits an error', function (done) {
    const pool = new Pool()
    pool.connect(function (err, client) {
      expect(err).to.equal(undefined)
      client.release()
      setImmediate(function () {
        client.emit('error', new Error('problem'))
      })
      pool.once('error', function (err, errClient) {
        expect(err.message).to.equal('problem')
        expect(errClient).to.equal(client)
        done()
      })
    })
  })
})

function mockClient(methods) {
  return function () {
    const client = new EventEmitter()
    Object.assign(client, methods)
    return client
  }
}
20
lib/node/node_modules/pg-pool/test/idle-timeout-exit.js
generated
vendored
Normal file
@@ -0,0 +1,20 @@
// This test is meant to be spawned from idle-timeout.js
if (module === require.main) {
  const allowExitOnIdle = process.env.ALLOW_EXIT_ON_IDLE === '1'
  const Pool = require('../index')

  const pool = new Pool({
    maxLifetimeSeconds: 2,
    idleTimeoutMillis: 200,
    ...(allowExitOnIdle ? { allowExitOnIdle: true } : {}),
  })
  pool.query('SELECT NOW()', (err, res) => console.log('completed first'))
  pool.on('remove', () => {
    console.log('removed')
    done()
  })

  setTimeout(() => {
    pool.query('SELECT * from generate_series(0, 1000)', (err, res) => console.log('completed second'))
  }, 50)
}
118
lib/node/node_modules/pg-pool/test/idle-timeout.js
generated
vendored
Normal file
@@ -0,0 +1,118 @@
'use strict'
const co = require('co')
const expect = require('expect.js')

const describe = require('mocha').describe
const it = require('mocha').it
const { fork } = require('child_process')
const path = require('path')

const Pool = require('../')

const wait = (time) => new Promise((resolve) => setTimeout(resolve, time))

describe('idle timeout', () => {
  it('should timeout and remove the client', (done) => {
    const pool = new Pool({ idleTimeoutMillis: 10 })
    pool.query('SELECT NOW()')
    pool.on('remove', () => {
      expect(pool.idleCount).to.equal(0)
      expect(pool.totalCount).to.equal(0)
      done()
    })
  })

  it(
    'times out and removes clients when others are also removed',
    co.wrap(function* () {
      const pool = new Pool({ idleTimeoutMillis: 10 })
      const clientA = yield pool.connect()
      const clientB = yield pool.connect()
      clientA.release()
      clientB.release(new Error())

      const removal = new Promise((resolve) => {
        pool.on('remove', () => {
          expect(pool.idleCount).to.equal(0)
          expect(pool.totalCount).to.equal(0)
          resolve()
        })
      })

      const timeout = wait(100).then(() => Promise.reject(new Error('Idle timeout failed to occur')))

      try {
        yield Promise.race([removal, timeout])
      } finally {
        pool.end()
      }
    })
  )

  it(
    'can remove idle clients and recreate them',
    co.wrap(function* () {
      const pool = new Pool({ idleTimeoutMillis: 1 })
      const results = []
      for (var i = 0; i < 20; i++) {
        let query = pool.query('SELECT NOW()')
        expect(pool.idleCount).to.equal(0)
        expect(pool.totalCount).to.equal(1)
        results.push(yield query)
        yield wait(2)
        expect(pool.idleCount).to.equal(0)
        expect(pool.totalCount).to.equal(0)
      }
      expect(results).to.have.length(20)
    })
  )

  it(
    'does not time out clients which are used',
    co.wrap(function* () {
      const pool = new Pool({ idleTimeoutMillis: 1 })
      const results = []
      for (var i = 0; i < 20; i++) {
        let client = yield pool.connect()
        expect(pool.totalCount).to.equal(1)
        expect(pool.idleCount).to.equal(0)
        yield wait(10)
        results.push(yield client.query('SELECT NOW()'))
        client.release()
        expect(pool.idleCount).to.equal(1)
        expect(pool.totalCount).to.equal(1)
      }
      expect(results).to.have.length(20)
      return pool.end()
    })
  )

  it('unrefs the connections and timeouts so the program can exit when idle when the allowExitOnIdle option is set', function (done) {
    const child = fork(path.join(__dirname, 'idle-timeout-exit.js'), [], {
      silent: true,
      env: { ...process.env, ALLOW_EXIT_ON_IDLE: '1' },
    })
    let result = ''
    child.stdout.setEncoding('utf8')
    child.stdout.on('data', (chunk) => (result += chunk))
    child.on('error', (err) => done(err))
    child.on('close', () => {
      expect(result).to.equal('completed first\ncompleted second\n')
      done()
    })
  })

  it('keeps old behavior when allowExitOnIdle option is not set', function (done) {
    const child = fork(path.join(__dirname, 'idle-timeout-exit.js'), [], {
      silent: true,
    })
    let result = ''
    child.stdout.setEncoding('utf8')
    child.stdout.on('data', (chunk) => (result += chunk))
    child.on('error', (err) => done(err))
    child.on('close', () => {
      expect(result).to.equal('completed first\ncompleted second\nremoved\n')
      done()
    })
  })
})
226
lib/node/node_modules/pg-pool/test/index.js
generated
vendored
Normal file
@@ -0,0 +1,226 @@
'use strict'
const expect = require('expect.js')
const _ = require('lodash')

const describe = require('mocha').describe
const it = require('mocha').it

const Pool = require('../')

describe('pool', function () {
  describe('with callbacks', function () {
    it('works totally unconfigured', function (done) {
      const pool = new Pool()
      pool.connect(function (err, client, release) {
        if (err) return done(err)
        client.query('SELECT NOW()', function (err, res) {
          release()
          if (err) return done(err)
          expect(res.rows).to.have.length(1)
          pool.end(done)
        })
      })
    })

    it('passes props to clients', function (done) {
      const pool = new Pool({ binary: true })
      pool.connect(function (err, client, release) {
        release()
        if (err) return done(err)
        expect(client.binary).to.eql(true)
        pool.end(done)
      })
    })

    it('can run a query with a callback without parameters', function (done) {
      const pool = new Pool()
      pool.query('SELECT 1 as num', function (err, res) {
        expect(res.rows[0]).to.eql({ num: 1 })
        pool.end(function () {
          done(err)
        })
      })
    })

    it('can run a query with a callback', function (done) {
      const pool = new Pool()
      pool.query('SELECT $1::text as name', ['brianc'], function (err, res) {
        expect(res.rows[0]).to.eql({ name: 'brianc' })
        pool.end(function () {
          done(err)
        })
      })
    })

    it('passes connection errors to callback', function (done) {
      const pool = new Pool({ port: 53922 })
      pool.query('SELECT $1::text as name', ['brianc'], function (err, res) {
        expect(res).to.be(undefined)
        expect(err).to.be.an(Error)
        // a connection error should not pollute the pool with a dead client
        expect(pool.totalCount).to.equal(0)
        pool.end(function (err) {
          done(err)
        })
      })
    })

    it('does not pass client to error callback', function (done) {
      const pool = new Pool({ port: 58242 })
      pool.connect(function (err, client, release) {
        expect(err).to.be.an(Error)
        expect(client).to.be(undefined)
        expect(release).to.be.a(Function)
        pool.end(done)
      })
    })

    it('removes client if it errors in background', function (done) {
      const pool = new Pool()
      pool.connect(function (err, client, release) {
        release()
        if (err) return done(err)
        client.testString = 'foo'
        setTimeout(function () {
          client.emit('error', new Error('on purpose'))
        }, 10)
      })
      pool.on('error', function (err) {
        expect(err.message).to.be('on purpose')
        expect(err.client).to.not.be(undefined)
        expect(err.client.testString).to.be('foo')
        err.client.connection.stream.on('end', function () {
          pool.end(done)
        })
      })
    })

    it('should not change given options', function (done) {
      const options = { max: 10 }
      const pool = new Pool(options)
      pool.connect(function (err, client, release) {
        release()
        if (err) return done(err)
        expect(options).to.eql({ max: 10 })
        pool.end(done)
      })
    })

    it('does not create promises when connecting', function (done) {
      const pool = new Pool()
      const returnValue = pool.connect(function (err, client, release) {
        release()
        if (err) return done(err)
        pool.end(done)
      })
      expect(returnValue).to.be(undefined)
    })

    it('does not create promises when querying', function (done) {
      const pool = new Pool()
      const returnValue = pool.query('SELECT 1 as num', function (err) {
        pool.end(function () {
          done(err)
        })
      })
      expect(returnValue).to.be(undefined)
    })

    it('does not create promises when ending', function (done) {
      const pool = new Pool()
      const returnValue = pool.end(done)
      expect(returnValue).to.be(undefined)
    })

    it('never calls callback synchronously', function (done) {
      const pool = new Pool()
      pool.connect((err, client) => {
        if (err) throw err
        client.release()
        setImmediate(() => {
          let called = false
          pool.connect((err, client) => {
            if (err) throw err
            called = true
            client.release()
            setImmediate(() => {
              pool.end(done)
            })
          })
          expect(called).to.equal(false)
        })
      })
    })
  })

  describe('with promises', function () {
    it('connects, queries, and disconnects', function () {
      const pool = new Pool()
      return pool.connect().then(function (client) {
        return client.query('select $1::text as name', ['hi']).then(function (res) {
          expect(res.rows).to.eql([{ name: 'hi' }])
          client.release()
          return pool.end()
        })
      })
    })

    it('executes a query directly', () => {
      const pool = new Pool()
      return pool.query('SELECT $1::text as name', ['hi']).then((res) => {
        expect(res.rows).to.have.length(1)
        expect(res.rows[0].name).to.equal('hi')
        return pool.end()
      })
    })

    it('properly pools clients', function () {
      const pool = new Pool({ poolSize: 9 })
      const promises = _.times(30, function () {
        return pool.connect().then(function (client) {
          return client.query('select $1::text as name', ['hi']).then(function (res) {
            client.release()
            return res
          })
        })
      })
      return Promise.all(promises).then(function (res) {
        expect(res).to.have.length(30)
        expect(pool.totalCount).to.be(9)
        return pool.end()
      })
    })

    it('supports just running queries', function () {
      const pool = new Pool({ poolSize: 9 })
      const text = 'select $1::text as name'
      const values = ['hi']
      const query = { text: text, values: values }
      const promises = _.times(30, () => pool.query(query))
      return Promise.all(promises).then(function (queries) {
        expect(queries).to.have.length(30)
        return pool.end()
      })
    })

    it('recovers from query errors', function () {
      const pool = new Pool()

      const errors = []
      const promises = _.times(30, () => {
        return pool.query('SELECT asldkfjasldkf').catch(function (e) {
          errors.push(e)
        })
      })
      return Promise.all(promises).then(() => {
        expect(errors).to.have.length(30)
        expect(pool.totalCount).to.equal(0)
        expect(pool.idleCount).to.equal(0)
        return pool.query('SELECT $1::text as name', ['hi']).then(function (res) {
          expect(res.rows).to.eql([{ name: 'hi' }])
          return pool.end()
        })
      })
    })
  })
})
48
lib/node/node_modules/pg-pool/test/lifetime-timeout.js
generated
vendored
Normal file
@@ -0,0 +1,48 @@
'use strict'
const co = require('co')
const expect = require('expect.js')

const describe = require('mocha').describe
const it = require('mocha').it
const path = require('path')

const Pool = require('../')

describe('lifetime timeout', () => {
  it('connection lifetime should expire and remove the client', (done) => {
    const pool = new Pool({ maxLifetimeSeconds: 1 })
    pool.query('SELECT NOW()')
    pool.on('remove', () => {
      console.log('expired while idle - on-remove event')
      expect(pool.expiredCount).to.equal(0)
      expect(pool.totalCount).to.equal(0)
      done()
    })
  })
  it('connection lifetime should expire and remove the client after the client is done working', (done) => {
    const pool = new Pool({ maxLifetimeSeconds: 1 })
    pool.query('SELECT pg_sleep(1.4)')
    pool.on('remove', () => {
      console.log('expired while busy - on-remove event')
      expect(pool.expiredCount).to.equal(0)
      expect(pool.totalCount).to.equal(0)
      done()
    })
  })
  it(
    'can remove expired clients and recreate them',
    co.wrap(function* () {
      const pool = new Pool({ maxLifetimeSeconds: 1 })
      let query = pool.query('SELECT pg_sleep(1.4)')
      expect(pool.expiredCount).to.equal(0)
      expect(pool.totalCount).to.equal(1)
      yield query
      yield new Promise((resolve) => setTimeout(resolve, 100))
      expect(pool.expiredCount).to.equal(0)
      expect(pool.totalCount).to.equal(0)
      yield pool.query('SELECT NOW()')
      expect(pool.expiredCount).to.equal(0)
      expect(pool.totalCount).to.equal(1)
    })
  )
})
20
lib/node/node_modules/pg-pool/test/logging.js
generated
vendored
Normal file
@@ -0,0 +1,20 @@
const expect = require('expect.js')

const describe = require('mocha').describe
const it = require('mocha').it

const Pool = require('../')

describe('logging', function () {
  it('logs to supplied log function if given', function () {
    const messages = []
    const log = function (msg) {
      messages.push(msg)
    }
    const pool = new Pool({ log: log })
    return pool.query('SELECT NOW()').then(function () {
      expect(messages.length).to.be.greaterThan(0)
      return pool.end()
    })
  })
})
98
lib/node/node_modules/pg-pool/test/max-uses.js
generated
vendored
Normal file
@@ -0,0 +1,98 @@
const expect = require('expect.js')
|
||||
const co = require('co')
|
||||
const _ = require('lodash')
|
||||
|
||||
const describe = require('mocha').describe
|
||||
const it = require('mocha').it
|
||||
|
||||
const Pool = require('../')
|
||||
|
||||
describe('maxUses', () => {
|
||||
it(
|
||||
'can create a single client and use it once',
|
||||
co.wrap(function* () {
|
||||
const pool = new Pool({ maxUses: 2 })
|
||||
expect(pool.waitingCount).to.equal(0)
|
||||
const client = yield pool.connect()
|
||||
const res = yield client.query('SELECT $1::text as name', ['hi'])
|
||||
expect(res.rows[0].name).to.equal('hi')
|
||||
client.release()
|
||||
pool.end()
|
||||
})
|
||||
)
|
||||
|
||||
it(
|
||||
'getting a connection a second time returns the same connection and releasing it also closes it',
|
||||
co.wrap(function* () {
|
||||
const pool = new Pool({ maxUses: 2 })
|
||||
expect(pool.waitingCount).to.equal(0)
|
||||
const client = yield pool.connect()
|
||||
client.release()
|
||||
const client2 = yield pool.connect()
|
||||
expect(client).to.equal(client2)
|
||||
expect(client2._ending).to.equal(false)
|
||||
client2.release()
|
||||
expect(client2._ending).to.equal(true)
|
||||
return yield pool.end()
|
||||
})
|
||||
)
|
||||
|
||||
it(
|
||||
'getting a connection a third time returns a new connection',
|
||||
co.wrap(function* () {
|
||||
const pool = new Pool({ maxUses: 2 })
|
||||
expect(pool.waitingCount).to.equal(0)
|
||||
const client = yield pool.connect()
|
||||
client.release()
|
||||
const client2 = yield pool.connect()
|
||||
expect(client).to.equal(client2)
|
||||
client2.release()
|
||||
const client3 = yield pool.connect()
|
||||
expect(client3).not.to.equal(client2)
|
||||
client3.release()
|
||||
return yield pool.end()
|
||||
})
|
||||
)
|
||||
|
||||
it(
|
||||
'getting a connection from a pending request gets a fresh client when the released candidate is expended',
|
||||
co.wrap(function* () {
|
||||
const pool = new Pool({ max: 1, maxUses: 2 })
|
||||
expect(pool.waitingCount).to.equal(0)
|
||||
const client1 = yield pool.connect()
|
||||
pool.connect().then((client2) => {
|
||||
expect(client2).to.equal(client1)
|
||||
expect(pool.waitingCount).to.equal(1)
|
||||
// Releasing the client this time should also expend it since maxUses is 2, causing client3 to be a fresh client
|
||||
client2.release()
|
||||
})
|
||||
const client3Promise = pool.connect().then((client3) => {
|
||||
// client3 should be a fresh client since client2's release caused the first client to be expended
|
||||
expect(pool.waitingCount).to.equal(0)
|
||||
expect(client3).not.to.equal(client1)
|
||||
return client3.release()
|
||||
})
|
||||
// There should be two pending requests since we have 3 connect requests but a max size of 1
|
||||
expect(pool.waitingCount).to.equal(2)
|
||||
// Releasing the client should not yet expend it since maxUses is 2
|
||||
client1.release()
|
||||
yield client3Promise
|
||||
return yield pool.end()
|
||||
})
|
||||
)
|
||||
|
||||
it(
|
||||
'logs when removing an expended client',
|
||||
co.wrap(function* () {
|
||||
const messages = []
|
||||
const log = function (msg) {
|
||||
messages.push(msg)
|
||||
}
|
||||
const pool = new Pool({ maxUses: 1, log })
|
||||
const client = yield pool.connect()
|
||||
client.release()
|
||||
expect(messages).to.contain('remove expended client')
|
||||
return yield pool.end()
|
||||
})
|
||||
)
|
||||
})
|
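The suite above pins down the maxUses contract: a pooled client is destroyed instead of being returned to the idle list once it has been checked out maxUses times, and a waiting connect() request then receives a fresh client. A minimal consumer-side sketch, assuming the same pg-pool constructor exercised by these tests (the cap of 7500 is an arbitrary illustrative number):

const Pool = require('pg-pool')

// Recycle each connection after 7500 checkouts, e.g. to rotate
// connections behind a load balancer over time.
const pool = new Pool({ max: 10, maxUses: 7500 })

async function run() {
  const client = await pool.connect()
  try {
    return await client.query('SELECT 1')
  } finally {
    // Either returns the client to the pool or, once its use count
    // reaches maxUses, removes and closes it.
    client.release()
  }
}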
54
lib/node/node_modules/pg-pool/test/releasing-clients.js
generated
vendored
Normal file
@ -0,0 +1,54 @@
const Pool = require('../')

const expect = require('expect.js')
const net = require('net')

describe('releasing clients', () => {
  it('removes a client which cannot be queried', async () => {
    // make a pool w/ only 1 client
    const pool = new Pool({ max: 1 })
    expect(pool.totalCount).to.eql(0)
    const client = await pool.connect()
    expect(pool.totalCount).to.eql(1)
    expect(pool.idleCount).to.eql(0)
    // reach into the client and sever its connection
    client.connection.end()

    // wait for the client to error out
    const err = await new Promise((resolve) => client.once('error', resolve))
    expect(err).to.be.ok()
    expect(pool.totalCount).to.eql(1)
    expect(pool.idleCount).to.eql(0)

    // try to return it to the pool - this removes it because it's broken
    client.release()
    expect(pool.totalCount).to.eql(0)
    expect(pool.idleCount).to.eql(0)

    // make sure pool still works
    const { rows } = await pool.query('SELECT NOW()')
    expect(rows).to.have.length(1)
    await pool.end()
  })

  it('removes a client which is ending', async () => {
    // make a pool w/ only 1 client
    const pool = new Pool({ max: 1 })
    expect(pool.totalCount).to.eql(0)
    const client = await pool.connect()
    expect(pool.totalCount).to.eql(1)
    expect(pool.idleCount).to.eql(0)
    // end the client gracefully (but you shouldn't do this with pooled clients)
    client.end()

    // try to return it to the pool
    client.release()
    expect(pool.totalCount).to.eql(0)
    expect(pool.idleCount).to.eql(0)

    // make sure pool still works
    const { rows } = await pool.query('SELECT NOW()')
    expect(rows).to.have.length(1)
    await pool.end()
  })
})
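Both tests call a bare client.release() and let the pool detect that the client is unusable. When the caller already knows the client is bad, pg-pool's release also accepts an error (or true) to force the client's removal; a short sketch under that assumption:

const pool = new Pool({ max: 1 })

async function queryOnce(text) {
  const client = await pool.connect()
  try {
    const res = await client.query(text)
    client.release()
    return res
  } catch (err) {
    // Passing the error tells the pool to destroy this client
    // rather than return it to the idle set.
    client.release(err)
    throw err
  }
}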
10
lib/node/node_modules/pg-pool/test/setup.js
generated
vendored
Normal file
@ -0,0 +1,10 @@
const crash = (reason) => {
  process.on(reason, (err) => {
    console.error(reason, err.stack)
    process.exit(-1)
  })
}

crash('unhandledRejection')
crash('uncaughtException')
crash('warning')
58
lib/node/node_modules/pg-pool/test/sizing.js
generated
vendored
Normal file
@ -0,0 +1,58 @@
const expect = require('expect.js')
const co = require('co')
const _ = require('lodash')

const describe = require('mocha').describe
const it = require('mocha').it

const Pool = require('../')

describe('pool size of 1', () => {
  it(
    'can create a single client and use it once',
    co.wrap(function* () {
      const pool = new Pool({ max: 1 })
      expect(pool.waitingCount).to.equal(0)
      const client = yield pool.connect()
      const res = yield client.query('SELECT $1::text as name', ['hi'])
      expect(res.rows[0].name).to.equal('hi')
      client.release()
      pool.end()
    })
  )

  it(
    'can create a single client and use it multiple times',
    co.wrap(function* () {
      const pool = new Pool({ max: 1 })
      expect(pool.waitingCount).to.equal(0)
      const client = yield pool.connect()
      const wait = pool.connect()
      expect(pool.waitingCount).to.equal(1)
      client.release()
      const client2 = yield wait
      expect(client).to.equal(client2)
      client2.release()
      return yield pool.end()
    })
  )

  it(
    'can only send 1 query at a time',
    co.wrap(function* () {
      const pool = new Pool({ max: 1 })

      // the query text column name changed in PostgreSQL 9.2
      const versionResult = yield pool.query('SHOW server_version_num')
      const version = parseInt(versionResult.rows[0].server_version_num, 10)
      const queryColumn = version < 90200 ? 'current_query' : 'query'

      const queryText = 'SELECT COUNT(*) as counts FROM pg_stat_activity WHERE ' + queryColumn + ' = $1'
      const queries = _.times(20, () => pool.query(queryText, [queryText]))
      const results = yield Promise.all(queries)
      const counts = results.map((res) => parseInt(res.rows[0].counts, 10))
      expect(counts).to.eql(_.times(20, (i) => 1))
      return yield pool.end()
    })
  )
})
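The waitingCount assertions encode the queueing rule this suite relies on: with max: 1, every connect() beyond the first parks a promise until a client is released, so concurrent queries are serialized over the single connection. A compact sketch of that ordering, using the same max: 1 setup:

const pool = new Pool({ max: 1 })

async function demo() {
  const first = await pool.connect() // takes the only slot
  const waiting = pool.connect() // queued until a release
  console.log(pool.waitingCount) // 1

  first.release() // hands the client to the waiter
  const second = await waiting
  second.release()
  await pool.end()
}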
19
lib/node/node_modules/pg-pool/test/submittable.js
generated
vendored
Normal file
@ -0,0 +1,19 @@
'use strict'
const Cursor = require('pg-cursor')
const expect = require('expect.js')
const describe = require('mocha').describe
const it = require('mocha').it

const Pool = require('../')

describe('submittable', () => {
  it('is returned from the query method', (done) => {
    const pool = new Pool()
    const cursor = pool.query(new Cursor('SELECT * from generate_series(0, 1000)'))
    cursor.read((err, rows) => {
      expect(err).to.be(undefined)
      expect(!!rows).to.be.ok()
      cursor.close(done)
    })
  })
})
0
lib/node/node_modules/pg-pool/test/timeout.js
generated
vendored
Normal file
24
lib/node/node_modules/pg-pool/test/verify.js
generated
vendored
Normal file
@ -0,0 +1,24 @@
'use strict'
const expect = require('expect.js')

const describe = require('mocha').describe
const it = require('mocha').it

const Pool = require('../')

describe('verify', () => {
  it('verifies a client with a callback', (done) => {
    const pool = new Pool({
      verify: (client, cb) => {
        cb(new Error('nope'))
      },
    })

    pool.connect((err, client) => {
      expect(err).to.be.an(Error)
      expect(err.message).to.be('nope')
      pool.end()
      done()
    })
  })
})
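The test covers only the failure branch of the verify hook. For the success branch the hook presumably completes by invoking the callback without an error, allowing the checkout to proceed; a sketch under that assumption (the SELECT 1 probe is illustrative, not part of pg-pool):

const pool = new Pool({
  verify: (client, cb) => {
    // Probe the connection before handing it to the caller; calling
    // cb with no error lets the connect() proceed.
    client.query('SELECT 1', (err) => cb(err))
  },
})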
21
lib/node/node_modules/pg-protocol/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
MIT License

Copyright (c) 2010 - 2021 Brian Carlson

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
3
lib/node/node_modules/pg-protocol/README.md
generated
vendored
Normal file
@ -0,0 +1,3 @@
# pg-protocol

Low level postgres wire protocol parser and serializer written in Typescript. Used by node-postgres. Needs more documentation. :smile:
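Judging by the index.d.ts later in this diff, the package's public surface is a parse(stream, callback) function plus the serialize helpers. A minimal sketch of wiring them to a raw socket (the startup parameters and connection details are illustrative):

const net = require('net')
const { parse, serialize } = require('pg-protocol')

const socket = net.connect(5432, 'localhost')
socket.on('connect', () => {
  socket.write(serialize.startup({ user: 'postgres', database: 'postgres' }))
})

// parse() resolves when the stream ends; each decoded backend
// message is handed to the callback as it arrives.
parse(socket, (message) => console.log(message.name))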
1
lib/node/node_modules/pg-protocol/dist/b.d.ts
generated
vendored
Normal file
@ -0,0 +1 @@
export {};
25
lib/node/node_modules/pg-protocol/dist/b.js
generated
vendored
Normal file
@ -0,0 +1,25 @@
"use strict";
// file for microbenchmarking
Object.defineProperty(exports, "__esModule", { value: true });
const buffer_writer_1 = require("./buffer-writer");
const buffer_reader_1 = require("./buffer-reader");
const LOOPS = 1000;
let count = 0;
let start = Date.now();
const writer = new buffer_writer_1.Writer();
const reader = new buffer_reader_1.BufferReader();
const buffer = Buffer.from([33, 33, 33, 33, 33, 33, 33, 0]);
const run = () => {
    if (count > LOOPS) {
        console.log(Date.now() - start);
        return;
    }
    count++;
    for (let i = 0; i < LOOPS; i++) {
        reader.setBuffer(0, buffer);
        reader.cstring();
    }
    setImmediate(run);
};
run();
//# sourceMappingURL=b.js.map
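A note on the harness above: it is a standalone microbenchmark of the reader's cstring() path and can be run directly (for example, node dist/b.js). Each setImmediate tick performs LOOPS reads, and the loop runs for LOOPS + 1 ticks, so the printed elapsed time covers roughly a million cstring() calls.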
1
lib/node/node_modules/pg-protocol/dist/b.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"file":"b.js","sourceRoot":"","sources":["../src/b.ts"],"names":[],"mappings":";AAAA,6BAA6B;;AAE7B,mDAAwC;AAExC,mDAA8C;AAE9C,MAAM,KAAK,GAAG,IAAI,CAAA;AAClB,IAAI,KAAK,GAAG,CAAC,CAAA;AACb,IAAI,KAAK,GAAG,IAAI,CAAC,GAAG,EAAE,CAAA;AACtB,MAAM,MAAM,GAAG,IAAI,sBAAM,EAAE,CAAA;AAE3B,MAAM,MAAM,GAAG,IAAI,4BAAY,EAAE,CAAA;AACjC,MAAM,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,CAAA;AAE3D,MAAM,GAAG,GAAG,GAAG,EAAE;IACf,IAAI,KAAK,GAAG,KAAK,EAAE;QACjB,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,KAAK,CAAC,CAAA;QAC/B,OAAM;KACP;IACD,KAAK,EAAE,CAAA;IACP,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,EAAE,CAAC,EAAE,EAAE;QAC9B,MAAM,CAAC,SAAS,CAAC,CAAC,EAAE,MAAM,CAAC,CAAA;QAC3B,MAAM,CAAC,OAAO,EAAE,CAAA;KACjB;IACD,YAAY,CAAC,GAAG,CAAC,CAAA;AACnB,CAAC,CAAA;AAED,GAAG,EAAE,CAAA"}
14
lib/node/node_modules/pg-protocol/dist/buffer-reader.d.ts
generated
vendored
Normal file
@ -0,0 +1,14 @@
/// <reference types="node" />
export declare class BufferReader {
    private offset;
    private buffer;
    private encoding;
    constructor(offset?: number);
    setBuffer(offset: number, buffer: Buffer): void;
    int16(): number;
    byte(): number;
    int32(): number;
    string(length: number): string;
    cstring(): string;
    bytes(length: number): Buffer;
}
50
lib/node/node_modules/pg-protocol/dist/buffer-reader.js
generated
vendored
Normal file
@ -0,0 +1,50 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.BufferReader = void 0;
const emptyBuffer = Buffer.allocUnsafe(0);
class BufferReader {
    constructor(offset = 0) {
        this.offset = offset;
        this.buffer = emptyBuffer;
        // TODO(bmc): support non-utf8 encoding?
        this.encoding = 'utf-8';
    }
    setBuffer(offset, buffer) {
        this.offset = offset;
        this.buffer = buffer;
    }
    int16() {
        const result = this.buffer.readInt16BE(this.offset);
        this.offset += 2;
        return result;
    }
    byte() {
        const result = this.buffer[this.offset];
        this.offset++;
        return result;
    }
    int32() {
        const result = this.buffer.readInt32BE(this.offset);
        this.offset += 4;
        return result;
    }
    string(length) {
        const result = this.buffer.toString(this.encoding, this.offset, this.offset + length);
        this.offset += length;
        return result;
    }
    cstring() {
        const start = this.offset;
        let end = start;
        while (this.buffer[end++] !== 0) { }
        this.offset = end;
        return this.buffer.toString(this.encoding, start, end - 1);
    }
    bytes(length) {
        const result = this.buffer.slice(this.offset, this.offset + length);
        this.offset += length;
        return result;
    }
}
exports.BufferReader = BufferReader;
//# sourceMappingURL=buffer-reader.js.map
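A small sketch of the cursor-style API implemented above: point the reader at a buffer once, then consume typed fields in wire order (the module path and byte values are illustrative; this is an internal module, not a documented entry point):

const { BufferReader } = require('pg-protocol/dist/buffer-reader')

const reader = new BufferReader()
reader.setBuffer(0, Buffer.from([0x00, 0x01, 0x68, 0x69, 0x00]))
const count = reader.int16() // 1; advances the offset two bytes
const label = reader.cstring() // 'hi'; stops at the null terminator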
1
lib/node/node_modules/pg-protocol/dist/buffer-reader.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"file":"buffer-reader.js","sourceRoot":"","sources":["../src/buffer-reader.ts"],"names":[],"mappings":";;;AAAA,MAAM,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAA;AAEzC,MAAa,YAAY;IAMvB,YAAoB,SAAiB,CAAC;QAAlB,WAAM,GAAN,MAAM,CAAY;QAL9B,WAAM,GAAW,WAAW,CAAA;QAEpC,wCAAwC;QAChC,aAAQ,GAAW,OAAO,CAAA;IAEO,CAAC;IAEnC,SAAS,CAAC,MAAc,EAAE,MAAc;QAC7C,IAAI,CAAC,MAAM,GAAG,MAAM,CAAA;QACpB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAA;IACtB,CAAC;IAEM,KAAK;QACV,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;QACnD,IAAI,CAAC,MAAM,IAAI,CAAC,CAAA;QAChB,OAAO,MAAM,CAAA;IACf,CAAC;IAEM,IAAI;QACT,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;QACvC,IAAI,CAAC,MAAM,EAAE,CAAA;QACb,OAAO,MAAM,CAAA;IACf,CAAC;IAEM,KAAK;QACV,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;QACnD,IAAI,CAAC,MAAM,IAAI,CAAC,CAAA;QAChB,OAAO,MAAM,CAAA;IACf,CAAC;IAEM,MAAM,CAAC,MAAc;QAC1B,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,CAAA;QACrF,IAAI,CAAC,MAAM,IAAI,MAAM,CAAA;QACrB,OAAO,MAAM,CAAA;IACf,CAAC;IAEM,OAAO;QACZ,MAAM,KAAK,GAAG,IAAI,CAAC,MAAM,CAAA;QACzB,IAAI,GAAG,GAAG,KAAK,CAAA;QACf,OAAO,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,KAAK,CAAC,EAAE,GAAE;QACnC,IAAI,CAAC,MAAM,GAAG,GAAG,CAAA;QACjB,OAAO,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,QAAQ,EAAE,KAAK,EAAE,GAAG,GAAG,CAAC,CAAC,CAAA;IAC5D,CAAC;IAEM,KAAK,CAAC,MAAc;QACzB,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,CAAA;QACnE,IAAI,CAAC,MAAM,IAAI,MAAM,CAAA;QACrB,OAAO,MAAM,CAAA;IACf,CAAC;CACF;AAlDD,oCAkDC"}
16
lib/node/node_modules/pg-protocol/dist/buffer-writer.d.ts
generated
vendored
Normal file
@ -0,0 +1,16 @@
/// <reference types="node" />
export declare class Writer {
    private size;
    private buffer;
    private offset;
    private headerPosition;
    constructor(size?: number);
    private ensure;
    addInt32(num: number): Writer;
    addInt16(num: number): Writer;
    addCString(string: string): Writer;
    addString(string?: string): Writer;
    add(otherBuffer: Buffer): Writer;
    private join;
    flush(code?: number): Buffer;
}
81
lib/node/node_modules/pg-protocol/dist/buffer-writer.js
generated
vendored
Normal file
@ -0,0 +1,81 @@
"use strict";
//binary data writer tuned for encoding binary specific to the postgres binary protocol
Object.defineProperty(exports, "__esModule", { value: true });
exports.Writer = void 0;
class Writer {
    constructor(size = 256) {
        this.size = size;
        this.offset = 5;
        this.headerPosition = 0;
        this.buffer = Buffer.allocUnsafe(size);
    }
    ensure(size) {
        var remaining = this.buffer.length - this.offset;
        if (remaining < size) {
            var oldBuffer = this.buffer;
            // exponential growth factor of around ~ 1.5
            // https://stackoverflow.com/questions/2269063/buffer-growth-strategy
            var newSize = oldBuffer.length + (oldBuffer.length >> 1) + size;
            this.buffer = Buffer.allocUnsafe(newSize);
            oldBuffer.copy(this.buffer);
        }
    }
    addInt32(num) {
        this.ensure(4);
        this.buffer[this.offset++] = (num >>> 24) & 0xff;
        this.buffer[this.offset++] = (num >>> 16) & 0xff;
        this.buffer[this.offset++] = (num >>> 8) & 0xff;
        this.buffer[this.offset++] = (num >>> 0) & 0xff;
        return this;
    }
    addInt16(num) {
        this.ensure(2);
        this.buffer[this.offset++] = (num >>> 8) & 0xff;
        this.buffer[this.offset++] = (num >>> 0) & 0xff;
        return this;
    }
    addCString(string) {
        if (!string) {
            this.ensure(1);
        }
        else {
            var len = Buffer.byteLength(string);
            this.ensure(len + 1); // +1 for null terminator
            this.buffer.write(string, this.offset, 'utf-8');
            this.offset += len;
        }
        this.buffer[this.offset++] = 0; // null terminator
        return this;
    }
    addString(string = '') {
        var len = Buffer.byteLength(string);
        this.ensure(len);
        this.buffer.write(string, this.offset);
        this.offset += len;
        return this;
    }
    add(otherBuffer) {
        this.ensure(otherBuffer.length);
        otherBuffer.copy(this.buffer, this.offset);
        this.offset += otherBuffer.length;
        return this;
    }
    join(code) {
        if (code) {
            this.buffer[this.headerPosition] = code;
            //length is everything in this packet minus the code
            const length = this.offset - (this.headerPosition + 1);
            this.buffer.writeInt32BE(length, this.headerPosition + 1);
        }
        return this.buffer.slice(code ? 0 : 5, this.offset);
    }
    flush(code) {
        var result = this.join(code);
        this.offset = 5;
        this.headerPosition = 0;
        this.buffer = Buffer.allocUnsafe(this.size);
        return result;
    }
}
exports.Writer = Writer;
//# sourceMappingURL=buffer-writer.js.map
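The five bytes reserved by offset = 5 are what let flush(code) retrofit the one-byte message code and the four-byte length header without copying. A sketch of building a Query ('Q', 0x51) packet with the class above (the internal module path is an assumption):

const { Writer } = require('pg-protocol/dist/buffer-writer')

const writer = new Writer()
const packet = writer.addCString('SELECT 1').flush(0x51)
// packet[0] is 0x51, followed by an Int32 length that covers
// everything after the code byte, then the null-terminated query.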
1
lib/node/node_modules/pg-protocol/dist/buffer-writer.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"file":"buffer-writer.js","sourceRoot":"","sources":["../src/buffer-writer.ts"],"names":[],"mappings":";AAAA,uFAAuF;;;AAEvF,MAAa,MAAM;IAIjB,YAAoB,OAAO,GAAG;QAAV,SAAI,GAAJ,IAAI,CAAM;QAFtB,WAAM,GAAW,CAAC,CAAA;QAClB,mBAAc,GAAW,CAAC,CAAA;QAEhC,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,CAAA;IACxC,CAAC;IAEO,MAAM,CAAC,IAAY;QACzB,IAAI,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QAChD,IAAI,SAAS,GAAG,IAAI,EAAE;YACpB,IAAI,SAAS,GAAG,IAAI,CAAC,MAAM,CAAA;YAC3B,4CAA4C;YAC5C,qEAAqE;YACrE,IAAI,OAAO,GAAG,SAAS,CAAC,MAAM,GAAG,CAAC,SAAS,CAAC,MAAM,IAAI,CAAC,CAAC,GAAG,IAAI,CAAA;YAC/D,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,WAAW,CAAC,OAAO,CAAC,CAAA;YACzC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;SAC5B;IACH,CAAC;IAEM,QAAQ,CAAC,GAAW;QACzB,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;QACd,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,EAAE,CAAC,GAAG,IAAI,CAAA;QAChD,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,EAAE,CAAC,GAAG,IAAI,CAAA;QAChD,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,IAAI,CAAA;QAC/C,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,IAAI,CAAA;QAC/C,OAAO,IAAI,CAAA;IACb,CAAC;IAEM,QAAQ,CAAC,GAAW;QACzB,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;QACd,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,IAAI,CAAA;QAC/C,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,IAAI,CAAA;QAC/C,OAAO,IAAI,CAAA;IACb,CAAC;IAEM,UAAU,CAAC,MAAc;QAC9B,IAAI,CAAC,MAAM,EAAE;YACX,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;SACf;aAAM;YACL,IAAI,GAAG,GAAG,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAA;YACnC,IAAI,CAAC,MAAM,CAAC,GAAG,GAAG,CAAC,CAAC,CAAA,CAAC,yBAAyB;YAC9C,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;YAC/C,IAAI,CAAC,MAAM,IAAI,GAAG,CAAA;SACnB;QAED,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,CAAA,CAAC,kBAAkB;QACjD,OAAO,IAAI,CAAA;IACb,CAAC;IAEM,SAAS,CAAC,SAAiB,EAAE;QAClC,IAAI,GAAG,GAAG,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAA;QACnC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAA;QAChB,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,CAAA;QACtC,IAAI,CAAC,MAAM,IAAI,GAAG,CAAA;QAClB,OAAO,IAAI,CAAA;IACb,CAAC;IAEM,GAAG,CAAC,WAAmB;QAC5B,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,MAAM,CAAC,CAAA;QAC/B,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,CAAA;QAC1C,IAAI,CAAC,MAAM,IAAI,WAAW,CAAC,MAAM,CAAA;QACjC,OAAO,IAAI,CAAA;IACb,CAAC;IAEO,IAAI,CAAC,IAAa;QACxB,IAAI,IAAI,EAAE;YACR,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,cAAc,CAAC,GAAG,IAAI,CAAA;YACvC,oDAAoD;YACpD,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,cAAc,GAAG,CAAC,CAAC,CAAA;YACtD,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,MAAM,EAAE,IAAI,CAAC,cAAc,GAAG,CAAC,CAAC,CAAA;SAC1D;QACD,OAAO,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,CAAA;IACrD,CAAC;IAEM,KAAK,CAAC,IAAa;QACxB,IAAI,MAAM,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;QAC5B,IAAI,CAAC,MAAM,GAAG,CAAC,CAAA;QACf,IAAI,CAAC,cAAc,GAAG,CAAC,CAAA;QACvB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;QAC3C,OAAO,MAAM,CAAA;IACf,CAAC;CACF;AAlFD,wBAkFC"}
1
lib/node/node_modules/pg-protocol/dist/inbound-parser.test.d.ts
generated
vendored
Normal file
@ -0,0 +1 @@
export {};
511
lib/node/node_modules/pg-protocol/dist/inbound-parser.test.js
generated
vendored
Normal file
@ -0,0 +1,511 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const test_buffers_1 = __importDefault(require("./testing/test-buffers"));
const buffer_list_1 = __importDefault(require("./testing/buffer-list"));
const _1 = require(".");
const assert_1 = __importDefault(require("assert"));
const stream_1 = require("stream");
var authOkBuffer = test_buffers_1.default.authenticationOk();
var paramStatusBuffer = test_buffers_1.default.parameterStatus('client_encoding', 'UTF8');
var readyForQueryBuffer = test_buffers_1.default.readyForQuery();
var backendKeyDataBuffer = test_buffers_1.default.backendKeyData(1, 2);
var commandCompleteBuffer = test_buffers_1.default.commandComplete('SELECT 3');
var parseCompleteBuffer = test_buffers_1.default.parseComplete();
var bindCompleteBuffer = test_buffers_1.default.bindComplete();
var portalSuspendedBuffer = test_buffers_1.default.portalSuspended();
var addRow = function (bufferList, name, offset) {
    return bufferList
        .addCString(name) // field name
        .addInt32(offset++) // table id
        .addInt16(offset++) // attribute of column number
        .addInt32(offset++) // objectId of field's data type
        .addInt16(offset++) // datatype size
        .addInt32(offset++) // type modifier
        .addInt16(0); // format code, 0 => text
};
var row1 = {
    name: 'id',
    tableID: 1,
    attributeNumber: 2,
    dataTypeID: 3,
    dataTypeSize: 4,
    typeModifier: 5,
    formatCode: 0,
};
var oneRowDescBuff = test_buffers_1.default.rowDescription([row1]);
row1.name = 'bang';
var twoRowBuf = test_buffers_1.default.rowDescription([
    row1,
    {
        name: 'whoah',
        tableID: 10,
        attributeNumber: 11,
        dataTypeID: 12,
        dataTypeSize: 13,
        typeModifier: 14,
        formatCode: 0,
    },
]);
var emptyRowFieldBuf = new buffer_list_1.default().addInt16(0).join(true, 'D');
var emptyRowFieldBuf = test_buffers_1.default.dataRow([]);
var oneFieldBuf = new buffer_list_1.default()
    .addInt16(1) // number of fields
    .addInt32(5) // length of bytes of fields
    .addCString('test')
    .join(true, 'D');
var oneFieldBuf = test_buffers_1.default.dataRow(['test']);
var expectedAuthenticationOkayMessage = {
    name: 'authenticationOk',
    length: 8,
};
var expectedParameterStatusMessage = {
    name: 'parameterStatus',
    parameterName: 'client_encoding',
    parameterValue: 'UTF8',
    length: 25,
};
var expectedBackendKeyDataMessage = {
    name: 'backendKeyData',
    processID: 1,
    secretKey: 2,
};
var expectedReadyForQueryMessage = {
    name: 'readyForQuery',
    length: 5,
    status: 'I',
};
var expectedCommandCompleteMessage = {
    name: 'commandComplete',
    length: 13,
    text: 'SELECT 3',
};
var emptyRowDescriptionBuffer = new buffer_list_1.default()
    .addInt16(0) // number of fields
    .join(true, 'T');
var expectedEmptyRowDescriptionMessage = {
    name: 'rowDescription',
    length: 6,
    fieldCount: 0,
    fields: [],
};
var expectedOneRowMessage = {
    name: 'rowDescription',
    length: 27,
    fieldCount: 1,
    fields: [
        {
            name: 'id',
            tableID: 1,
            columnID: 2,
            dataTypeID: 3,
            dataTypeSize: 4,
            dataTypeModifier: 5,
            format: 'text',
        },
    ],
};
var expectedTwoRowMessage = {
    name: 'rowDescription',
    length: 53,
    fieldCount: 2,
    fields: [
        {
            name: 'bang',
            tableID: 1,
            columnID: 2,
            dataTypeID: 3,
            dataTypeSize: 4,
            dataTypeModifier: 5,
            format: 'text',
        },
        {
            name: 'whoah',
            tableID: 10,
            columnID: 11,
            dataTypeID: 12,
            dataTypeSize: 13,
            dataTypeModifier: 14,
            format: 'text',
        },
    ],
};
var emptyParameterDescriptionBuffer = new buffer_list_1.default()
    .addInt16(0) // number of parameters
    .join(true, 't');
var oneParameterDescBuf = test_buffers_1.default.parameterDescription([1111]);
var twoParameterDescBuf = test_buffers_1.default.parameterDescription([2222, 3333]);
var expectedEmptyParameterDescriptionMessage = {
    name: 'parameterDescription',
    length: 6,
    parameterCount: 0,
    dataTypeIDs: [],
};
var expectedOneParameterMessage = {
    name: 'parameterDescription',
    length: 10,
    parameterCount: 1,
    dataTypeIDs: [1111],
};
var expectedTwoParameterMessage = {
    name: 'parameterDescription',
    length: 14,
    parameterCount: 2,
    dataTypeIDs: [2222, 3333],
};
var testForMessage = function (buffer, expectedMessage) {
    it('receives and parses ' + expectedMessage.name, () => __awaiter(this, void 0, void 0, function* () {
        const messages = yield parseBuffers([buffer]);
        const [lastMessage] = messages;
        for (const key in expectedMessage) {
            assert_1.default.deepEqual(lastMessage[key], expectedMessage[key]);
        }
    }));
};
var plainPasswordBuffer = test_buffers_1.default.authenticationCleartextPassword();
var md5PasswordBuffer = test_buffers_1.default.authenticationMD5Password();
var SASLBuffer = test_buffers_1.default.authenticationSASL();
var SASLContinueBuffer = test_buffers_1.default.authenticationSASLContinue();
var SASLFinalBuffer = test_buffers_1.default.authenticationSASLFinal();
var expectedPlainPasswordMessage = {
    name: 'authenticationCleartextPassword',
};
var expectedMD5PasswordMessage = {
    name: 'authenticationMD5Password',
    salt: Buffer.from([1, 2, 3, 4]),
};
var expectedSASLMessage = {
    name: 'authenticationSASL',
    mechanisms: ['SCRAM-SHA-256'],
};
var expectedSASLContinueMessage = {
    name: 'authenticationSASLContinue',
    data: 'data',
};
var expectedSASLFinalMessage = {
    name: 'authenticationSASLFinal',
    data: 'data',
};
var notificationResponseBuffer = test_buffers_1.default.notification(4, 'hi', 'boom');
var expectedNotificationResponseMessage = {
    name: 'notification',
    processId: 4,
    channel: 'hi',
    payload: 'boom',
};
const parseBuffers = (buffers) => __awaiter(void 0, void 0, void 0, function* () {
    const stream = new stream_1.PassThrough();
    for (const buffer of buffers) {
        stream.write(buffer);
    }
    stream.end();
    const msgs = [];
    yield (0, _1.parse)(stream, (msg) => msgs.push(msg));
    return msgs;
});
describe('PgPacketStream', function () {
    testForMessage(authOkBuffer, expectedAuthenticationOkayMessage);
    testForMessage(plainPasswordBuffer, expectedPlainPasswordMessage);
    testForMessage(md5PasswordBuffer, expectedMD5PasswordMessage);
    testForMessage(SASLBuffer, expectedSASLMessage);
    testForMessage(SASLContinueBuffer, expectedSASLContinueMessage);
    // this exercises a found bug in the parser:
    // https://github.com/brianc/node-postgres/pull/2210#issuecomment-627626084
    // and adds a test which is deterministic, rather than relying on network packet chunking
    const extendedSASLContinueBuffer = Buffer.concat([SASLContinueBuffer, Buffer.from([1, 2, 3, 4])]);
    testForMessage(extendedSASLContinueBuffer, expectedSASLContinueMessage);
    testForMessage(SASLFinalBuffer, expectedSASLFinalMessage);
    // this exercises a found bug in the parser:
    // https://github.com/brianc/node-postgres/pull/2210#issuecomment-627626084
    // and adds a test which is deterministic, rather than relying on network packet chunking
    const extendedSASLFinalBuffer = Buffer.concat([SASLFinalBuffer, Buffer.from([1, 2, 4, 5])]);
    testForMessage(extendedSASLFinalBuffer, expectedSASLFinalMessage);
    testForMessage(paramStatusBuffer, expectedParameterStatusMessage);
    testForMessage(backendKeyDataBuffer, expectedBackendKeyDataMessage);
    testForMessage(readyForQueryBuffer, expectedReadyForQueryMessage);
    testForMessage(commandCompleteBuffer, expectedCommandCompleteMessage);
    testForMessage(notificationResponseBuffer, expectedNotificationResponseMessage);
    testForMessage(test_buffers_1.default.emptyQuery(), {
        name: 'emptyQuery',
        length: 4,
    });
    testForMessage(Buffer.from([0x6e, 0, 0, 0, 4]), {
        name: 'noData',
    });
    describe('rowDescription messages', function () {
        testForMessage(emptyRowDescriptionBuffer, expectedEmptyRowDescriptionMessage);
        testForMessage(oneRowDescBuff, expectedOneRowMessage);
        testForMessage(twoRowBuf, expectedTwoRowMessage);
    });
    describe('parameterDescription messages', function () {
        testForMessage(emptyParameterDescriptionBuffer, expectedEmptyParameterDescriptionMessage);
        testForMessage(oneParameterDescBuf, expectedOneParameterMessage);
        testForMessage(twoParameterDescBuf, expectedTwoParameterMessage);
    });
    describe('parsing rows', function () {
        describe('parsing empty row', function () {
            testForMessage(emptyRowFieldBuf, {
                name: 'dataRow',
                fieldCount: 0,
            });
        });
        describe('parsing data row with fields', function () {
            testForMessage(oneFieldBuf, {
                name: 'dataRow',
                fieldCount: 1,
                fields: ['test'],
            });
        });
    });
    describe('notice message', function () {
        // this uses the same logic as error message
        var buff = test_buffers_1.default.notice([{ type: 'C', value: 'code' }]);
        testForMessage(buff, {
            name: 'notice',
            code: 'code',
        });
    });
    testForMessage(test_buffers_1.default.error([]), {
        name: 'error',
    });
    describe('with all the fields', function () {
        var buffer = test_buffers_1.default.error([
            {
                type: 'S',
                value: 'ERROR',
            },
            {
                type: 'C',
                value: 'code',
            },
            {
                type: 'M',
                value: 'message',
            },
            {
                type: 'D',
                value: 'details',
            },
            {
                type: 'H',
                value: 'hint',
            },
            {
                type: 'P',
                value: '100',
            },
            {
                type: 'p',
                value: '101',
            },
            {
                type: 'q',
                value: 'query',
            },
            {
                type: 'W',
                value: 'where',
            },
            {
                type: 'F',
                value: 'file',
            },
            {
                type: 'L',
                value: 'line',
            },
            {
                type: 'R',
                value: 'routine',
            },
            {
                type: 'Z',
                value: 'alsdkf',
            },
        ]);
        testForMessage(buffer, {
            name: 'error',
            severity: 'ERROR',
            code: 'code',
            message: 'message',
            detail: 'details',
            hint: 'hint',
            position: '100',
            internalPosition: '101',
            internalQuery: 'query',
            where: 'where',
            file: 'file',
            line: 'line',
            routine: 'routine',
        });
    });
    testForMessage(parseCompleteBuffer, {
        name: 'parseComplete',
    });
    testForMessage(bindCompleteBuffer, {
        name: 'bindComplete',
    });
    testForMessage(bindCompleteBuffer, {
        name: 'bindComplete',
    });
    testForMessage(test_buffers_1.default.closeComplete(), {
        name: 'closeComplete',
    });
    describe('parses portal suspended message', function () {
        testForMessage(portalSuspendedBuffer, {
            name: 'portalSuspended',
        });
    });
    describe('parses replication start message', function () {
        testForMessage(Buffer.from([0x57, 0x00, 0x00, 0x00, 0x04]), {
            name: 'replicationStart',
            length: 4,
        });
    });
    describe('copy', () => {
        testForMessage(test_buffers_1.default.copyIn(0), {
            name: 'copyInResponse',
            length: 7,
            binary: false,
            columnTypes: [],
        });
        testForMessage(test_buffers_1.default.copyIn(2), {
            name: 'copyInResponse',
            length: 11,
            binary: false,
            columnTypes: [0, 1],
        });
        testForMessage(test_buffers_1.default.copyOut(0), {
            name: 'copyOutResponse',
            length: 7,
            binary: false,
            columnTypes: [],
        });
        testForMessage(test_buffers_1.default.copyOut(3), {
            name: 'copyOutResponse',
            length: 13,
            binary: false,
            columnTypes: [0, 1, 2],
        });
        testForMessage(test_buffers_1.default.copyDone(), {
            name: 'copyDone',
            length: 4,
        });
        testForMessage(test_buffers_1.default.copyData(Buffer.from([5, 6, 7])), {
            name: 'copyData',
            length: 7,
            chunk: Buffer.from([5, 6, 7]),
        });
    });
    // since the data message on a stream can randomly divide the incoming
    // tcp packets anywhere, we need to make sure we can parse every single
    // split on a tcp message
    describe('split buffer, single message parsing', function () {
        var fullBuffer = test_buffers_1.default.dataRow([null, 'bang', 'zug zug', null, '!']);
        it('parses when full buffer comes in', function () {
            return __awaiter(this, void 0, void 0, function* () {
                const messages = yield parseBuffers([fullBuffer]);
                const message = messages[0];
                assert_1.default.equal(message.fields.length, 5);
                assert_1.default.equal(message.fields[0], null);
                assert_1.default.equal(message.fields[1], 'bang');
                assert_1.default.equal(message.fields[2], 'zug zug');
                assert_1.default.equal(message.fields[3], null);
                assert_1.default.equal(message.fields[4], '!');
            });
        });
        var testMessageReceivedAfterSplitAt = function (split) {
            return __awaiter(this, void 0, void 0, function* () {
                var firstBuffer = Buffer.alloc(fullBuffer.length - split);
                var secondBuffer = Buffer.alloc(fullBuffer.length - firstBuffer.length);
                fullBuffer.copy(firstBuffer, 0, 0);
                fullBuffer.copy(secondBuffer, 0, firstBuffer.length);
                const messages = yield parseBuffers([firstBuffer, secondBuffer]);
                const message = messages[0];
                assert_1.default.equal(message.fields.length, 5);
                assert_1.default.equal(message.fields[0], null);
                assert_1.default.equal(message.fields[1], 'bang');
                assert_1.default.equal(message.fields[2], 'zug zug');
                assert_1.default.equal(message.fields[3], null);
                assert_1.default.equal(message.fields[4], '!');
            });
        };
        it('parses when split in the middle', function () {
            return testMessageReceivedAfterSplitAt(6);
        });
        it('parses when split at end', function () {
            return testMessageReceivedAfterSplitAt(2);
        });
        it('parses when split at beginning', function () {
            return Promise.all([
                testMessageReceivedAfterSplitAt(fullBuffer.length - 2),
                testMessageReceivedAfterSplitAt(fullBuffer.length - 1),
                testMessageReceivedAfterSplitAt(fullBuffer.length - 5),
            ]);
        });
    });
    describe('split buffer, multiple message parsing', function () {
        var dataRowBuffer = test_buffers_1.default.dataRow(['!']);
        var readyForQueryBuffer = test_buffers_1.default.readyForQuery();
        var fullBuffer = Buffer.alloc(dataRowBuffer.length + readyForQueryBuffer.length);
        dataRowBuffer.copy(fullBuffer, 0, 0);
        readyForQueryBuffer.copy(fullBuffer, dataRowBuffer.length, 0);
        var verifyMessages = function (messages) {
            assert_1.default.strictEqual(messages.length, 2);
            assert_1.default.deepEqual(messages[0], {
                name: 'dataRow',
                fieldCount: 1,
                length: 11,
                fields: ['!'],
            });
            assert_1.default.equal(messages[0].fields[0], '!');
            assert_1.default.deepEqual(messages[1], {
                name: 'readyForQuery',
                length: 5,
                status: 'I',
            });
        };
        // sanity check
        it('receives both messages when packet is not split', function () {
            return __awaiter(this, void 0, void 0, function* () {
                const messages = yield parseBuffers([fullBuffer]);
                verifyMessages(messages);
            });
        });
        var splitAndVerifyTwoMessages = function (split) {
            return __awaiter(this, void 0, void 0, function* () {
                var firstBuffer = Buffer.alloc(fullBuffer.length - split);
                var secondBuffer = Buffer.alloc(fullBuffer.length - firstBuffer.length);
                fullBuffer.copy(firstBuffer, 0, 0);
                fullBuffer.copy(secondBuffer, 0, firstBuffer.length);
                const messages = yield parseBuffers([firstBuffer, secondBuffer]);
                verifyMessages(messages);
            });
        };
        describe('receives both messages when packet is split', function () {
            it('in the middle', function () {
                return splitAndVerifyTwoMessages(11);
            });
            it('at the front', function () {
                return Promise.all([
                    splitAndVerifyTwoMessages(fullBuffer.length - 1),
                    splitAndVerifyTwoMessages(fullBuffer.length - 4),
                    splitAndVerifyTwoMessages(fullBuffer.length - 6),
                ]);
            });
            it('at the end', function () {
                return Promise.all([splitAndVerifyTwoMessages(8), splitAndVerifyTwoMessages(1)]);
            });
        });
    });
});
//# sourceMappingURL=inbound-parser.test.js.map
1
lib/node/node_modules/pg-protocol/dist/inbound-parser.test.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
6
lib/node/node_modules/pg-protocol/dist/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,6 @@
/// <reference types="node" />
import { DatabaseError } from './messages';
import { serialize } from './serializer';
import { MessageCallback } from './parser';
export declare function parse(stream: NodeJS.ReadableStream, callback: MessageCallback): Promise<void>;
export { serialize, DatabaseError };
15
lib/node/node_modules/pg-protocol/dist/index.js
generated
vendored
Normal file
@ -0,0 +1,15 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.DatabaseError = exports.serialize = exports.parse = void 0;
const messages_1 = require("./messages");
Object.defineProperty(exports, "DatabaseError", { enumerable: true, get: function () { return messages_1.DatabaseError; } });
const serializer_1 = require("./serializer");
Object.defineProperty(exports, "serialize", { enumerable: true, get: function () { return serializer_1.serialize; } });
const parser_1 = require("./parser");
function parse(stream, callback) {
    const parser = new parser_1.Parser();
    stream.on('data', (buffer) => parser.parse(buffer, callback));
    return new Promise((resolve) => stream.on('end', () => resolve()));
}
exports.parse = parse;
//# sourceMappingURL=index.js.map
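The inbound-parser tests earlier in this diff drive parse() through a PassThrough stream, which doubles as the simplest offline harness for this entry point; a condensed version of that pattern:

const { PassThrough } = require('stream')
const { parse } = require('pg-protocol')

async function parseBytes(buffer) {
  const stream = new PassThrough()
  stream.end(buffer)
  const messages = []
  await parse(stream, (msg) => messages.push(msg))
  return messages
}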
1
lib/node/node_modules/pg-protocol/dist/index.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;AAAA,yCAA0D;AAUtC,8FAVK,wBAAa,OAUL;AATjC,6CAAwC;AAS/B,0FATA,sBAAS,OASA;AARlB,qCAAkD;AAElD,SAAgB,KAAK,CAAC,MAA6B,EAAE,QAAyB;IAC5E,MAAM,MAAM,GAAG,IAAI,eAAM,EAAE,CAAA;IAC3B,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,MAAc,EAAE,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAA;IACrE,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE,CAAC,OAAO,EAAE,CAAC,CAAC,CAAA;AACpE,CAAC;AAJD,sBAIC"}
162
lib/node/node_modules/pg-protocol/dist/messages.d.ts
generated
vendored
Normal file
@ -0,0 +1,162 @@
/// <reference types="node" />
export declare type Mode = 'text' | 'binary';
export declare type MessageName = 'parseComplete' | 'bindComplete' | 'closeComplete' | 'noData' | 'portalSuspended' | 'replicationStart' | 'emptyQuery' | 'copyDone' | 'copyData' | 'rowDescription' | 'parameterDescription' | 'parameterStatus' | 'backendKeyData' | 'notification' | 'readyForQuery' | 'commandComplete' | 'dataRow' | 'copyInResponse' | 'copyOutResponse' | 'authenticationOk' | 'authenticationMD5Password' | 'authenticationCleartextPassword' | 'authenticationSASL' | 'authenticationSASLContinue' | 'authenticationSASLFinal' | 'error' | 'notice';
export interface BackendMessage {
    name: MessageName;
    length: number;
}
export declare const parseComplete: BackendMessage;
export declare const bindComplete: BackendMessage;
export declare const closeComplete: BackendMessage;
export declare const noData: BackendMessage;
export declare const portalSuspended: BackendMessage;
export declare const replicationStart: BackendMessage;
export declare const emptyQuery: BackendMessage;
export declare const copyDone: BackendMessage;
interface NoticeOrError {
    message: string | undefined;
    severity: string | undefined;
    code: string | undefined;
    detail: string | undefined;
    hint: string | undefined;
    position: string | undefined;
    internalPosition: string | undefined;
    internalQuery: string | undefined;
    where: string | undefined;
    schema: string | undefined;
    table: string | undefined;
    column: string | undefined;
    dataType: string | undefined;
    constraint: string | undefined;
    file: string | undefined;
    line: string | undefined;
    routine: string | undefined;
}
export declare class DatabaseError extends Error implements NoticeOrError {
    readonly length: number;
    readonly name: MessageName;
    severity: string | undefined;
    code: string | undefined;
    detail: string | undefined;
    hint: string | undefined;
    position: string | undefined;
    internalPosition: string | undefined;
    internalQuery: string | undefined;
    where: string | undefined;
    schema: string | undefined;
    table: string | undefined;
    column: string | undefined;
    dataType: string | undefined;
    constraint: string | undefined;
    file: string | undefined;
    line: string | undefined;
    routine: string | undefined;
    constructor(message: string, length: number, name: MessageName);
}
export declare class CopyDataMessage {
    readonly length: number;
    readonly chunk: Buffer;
    readonly name = "copyData";
    constructor(length: number, chunk: Buffer);
}
export declare class CopyResponse {
    readonly length: number;
    readonly name: MessageName;
    readonly binary: boolean;
    readonly columnTypes: number[];
    constructor(length: number, name: MessageName, binary: boolean, columnCount: number);
}
export declare class Field {
    readonly name: string;
    readonly tableID: number;
    readonly columnID: number;
    readonly dataTypeID: number;
    readonly dataTypeSize: number;
    readonly dataTypeModifier: number;
    readonly format: Mode;
    constructor(name: string, tableID: number, columnID: number, dataTypeID: number, dataTypeSize: number, dataTypeModifier: number, format: Mode);
}
export declare class RowDescriptionMessage {
    readonly length: number;
    readonly fieldCount: number;
    readonly name: MessageName;
    readonly fields: Field[];
    constructor(length: number, fieldCount: number);
}
export declare class ParameterDescriptionMessage {
    readonly length: number;
    readonly parameterCount: number;
    readonly name: MessageName;
    readonly dataTypeIDs: number[];
    constructor(length: number, parameterCount: number);
}
export declare class ParameterStatusMessage {
    readonly length: number;
    readonly parameterName: string;
    readonly parameterValue: string;
    readonly name: MessageName;
    constructor(length: number, parameterName: string, parameterValue: string);
}
export declare class AuthenticationMD5Password implements BackendMessage {
    readonly length: number;
    readonly salt: Buffer;
    readonly name: MessageName;
    constructor(length: number, salt: Buffer);
}
export declare class BackendKeyDataMessage {
    readonly length: number;
    readonly processID: number;
    readonly secretKey: number;
    readonly name: MessageName;
    constructor(length: number, processID: number, secretKey: number);
}
export declare class NotificationResponseMessage {
    readonly length: number;
    readonly processId: number;
    readonly channel: string;
    readonly payload: string;
    readonly name: MessageName;
    constructor(length: number, processId: number, channel: string, payload: string);
}
export declare class ReadyForQueryMessage {
    readonly length: number;
    readonly status: string;
    readonly name: MessageName;
    constructor(length: number, status: string);
}
export declare class CommandCompleteMessage {
    readonly length: number;
    readonly text: string;
    readonly name: MessageName;
    constructor(length: number, text: string);
}
export declare class DataRowMessage {
    length: number;
    fields: any[];
    readonly fieldCount: number;
    readonly name: MessageName;
    constructor(length: number, fields: any[]);
}
export declare class NoticeMessage implements BackendMessage, NoticeOrError {
    readonly length: number;
    readonly message: string | undefined;
    constructor(length: number, message: string | undefined);
    readonly name = "notice";
    severity: string | undefined;
    code: string | undefined;
    detail: string | undefined;
    hint: string | undefined;
    position: string | undefined;
    internalPosition: string | undefined;
    internalQuery: string | undefined;
    where: string | undefined;
    schema: string | undefined;
    table: string | undefined;
    column: string | undefined;
    dataType: string | undefined;
    constraint: string | undefined;
    file: string | undefined;
    line: string | undefined;
    routine: string | undefined;
}
export {};
160
lib/node/node_modules/pg-protocol/dist/messages.js
generated
vendored
Normal file
@ -0,0 +1,160 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.NoticeMessage = exports.DataRowMessage = exports.CommandCompleteMessage = exports.ReadyForQueryMessage = exports.NotificationResponseMessage = exports.BackendKeyDataMessage = exports.AuthenticationMD5Password = exports.ParameterStatusMessage = exports.ParameterDescriptionMessage = exports.RowDescriptionMessage = exports.Field = exports.CopyResponse = exports.CopyDataMessage = exports.DatabaseError = exports.copyDone = exports.emptyQuery = exports.replicationStart = exports.portalSuspended = exports.noData = exports.closeComplete = exports.bindComplete = exports.parseComplete = void 0;
exports.parseComplete = {
    name: 'parseComplete',
    length: 5,
};
exports.bindComplete = {
    name: 'bindComplete',
    length: 5,
};
exports.closeComplete = {
    name: 'closeComplete',
    length: 5,
};
exports.noData = {
    name: 'noData',
    length: 5,
};
exports.portalSuspended = {
    name: 'portalSuspended',
    length: 5,
};
exports.replicationStart = {
    name: 'replicationStart',
    length: 4,
};
exports.emptyQuery = {
    name: 'emptyQuery',
    length: 4,
};
exports.copyDone = {
    name: 'copyDone',
    length: 4,
};
class DatabaseError extends Error {
    constructor(message, length, name) {
        super(message);
        this.length = length;
        this.name = name;
    }
}
exports.DatabaseError = DatabaseError;
class CopyDataMessage {
    constructor(length, chunk) {
        this.length = length;
        this.chunk = chunk;
        this.name = 'copyData';
    }
}
exports.CopyDataMessage = CopyDataMessage;
class CopyResponse {
    constructor(length, name, binary, columnCount) {
        this.length = length;
        this.name = name;
        this.binary = binary;
        this.columnTypes = new Array(columnCount);
    }
}
exports.CopyResponse = CopyResponse;
class Field {
    constructor(name, tableID, columnID, dataTypeID, dataTypeSize, dataTypeModifier, format) {
        this.name = name;
        this.tableID = tableID;
        this.columnID = columnID;
        this.dataTypeID = dataTypeID;
        this.dataTypeSize = dataTypeSize;
        this.dataTypeModifier = dataTypeModifier;
        this.format = format;
    }
}
exports.Field = Field;
class RowDescriptionMessage {
    constructor(length, fieldCount) {
        this.length = length;
        this.fieldCount = fieldCount;
        this.name = 'rowDescription';
        this.fields = new Array(this.fieldCount);
    }
}
exports.RowDescriptionMessage = RowDescriptionMessage;
class ParameterDescriptionMessage {
    constructor(length, parameterCount) {
        this.length = length;
        this.parameterCount = parameterCount;
        this.name = 'parameterDescription';
        this.dataTypeIDs = new Array(this.parameterCount);
    }
}
exports.ParameterDescriptionMessage = ParameterDescriptionMessage;
class ParameterStatusMessage {
    constructor(length, parameterName, parameterValue) {
        this.length = length;
        this.parameterName = parameterName;
        this.parameterValue = parameterValue;
        this.name = 'parameterStatus';
    }
}
exports.ParameterStatusMessage = ParameterStatusMessage;
class AuthenticationMD5Password {
    constructor(length, salt) {
        this.length = length;
        this.salt = salt;
        this.name = 'authenticationMD5Password';
    }
}
exports.AuthenticationMD5Password = AuthenticationMD5Password;
class BackendKeyDataMessage {
    constructor(length, processID, secretKey) {
        this.length = length;
        this.processID = processID;
        this.secretKey = secretKey;
        this.name = 'backendKeyData';
    }
}
exports.BackendKeyDataMessage = BackendKeyDataMessage;
class NotificationResponseMessage {
    constructor(length, processId, channel, payload) {
        this.length = length;
        this.processId = processId;
        this.channel = channel;
        this.payload = payload;
        this.name = 'notification';
    }
}
exports.NotificationResponseMessage = NotificationResponseMessage;
class ReadyForQueryMessage {
    constructor(length, status) {
        this.length = length;
        this.status = status;
        this.name = 'readyForQuery';
    }
}
exports.ReadyForQueryMessage = ReadyForQueryMessage;
class CommandCompleteMessage {
    constructor(length, text) {
        this.length = length;
        this.text = text;
        this.name = 'commandComplete';
    }
}
exports.CommandCompleteMessage = CommandCompleteMessage;
class DataRowMessage {
    constructor(length, fields) {
        this.length = length;
        this.fields = fields;
        this.name = 'dataRow';
        this.fieldCount = fields.length;
    }
}
exports.DataRowMessage = DataRowMessage;
class NoticeMessage {
    constructor(length, message) {
        this.length = length;
        this.message = message;
        this.name = 'notice';
    }
}
exports.NoticeMessage = NoticeMessage;
//# sourceMappingURL=messages.js.map
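Note that the constructor above sets only message, length, and name; the positional fields (severity, code, hint, and the rest of the ErrorResponse payload) are declared on the class and, judging by the parser tests earlier in this diff, assigned after construction as the error message is decoded. A sketch of consuming it from the parse callback, under that assumption:

parse(stream, (msg) => {
  if (msg.name === 'error') {
    // msg is a DatabaseError; the positional fields are optional strings.
    console.error(msg.severity, msg.code, msg.message)
  }
})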
1
lib/node/node_modules/pg-protocol/dist/messages.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"file":"messages.js","sourceRoot":"","sources":["../src/messages.ts"],"names":[],"mappings":";;;AAoCa,QAAA,aAAa,GAAmB;IAC3C,IAAI,EAAE,eAAe;IACrB,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,YAAY,GAAmB;IAC1C,IAAI,EAAE,cAAc;IACpB,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,aAAa,GAAmB;IAC3C,IAAI,EAAE,eAAe;IACrB,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,MAAM,GAAmB;IACpC,IAAI,EAAE,QAAQ;IACd,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,eAAe,GAAmB;IAC7C,IAAI,EAAE,iBAAiB;IACvB,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,gBAAgB,GAAmB;IAC9C,IAAI,EAAE,kBAAkB;IACxB,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,UAAU,GAAmB;IACxC,IAAI,EAAE,YAAY;IAClB,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,QAAQ,GAAmB;IACtC,IAAI,EAAE,UAAU;IAChB,MAAM,EAAE,CAAC;CACV,CAAA;AAsBD,MAAa,aAAc,SAAQ,KAAK;IAiBtC,YACE,OAAe,EACC,MAAc,EACd,IAAiB;QAEjC,KAAK,CAAC,OAAO,CAAC,CAAA;QAHE,WAAM,GAAN,MAAM,CAAQ;QACd,SAAI,GAAJ,IAAI,CAAa;IAGnC,CAAC;CACF;AAxBD,sCAwBC;AAED,MAAa,eAAe;IAE1B,YACkB,MAAc,EACd,KAAa;QADb,WAAM,GAAN,MAAM,CAAQ;QACd,UAAK,GAAL,KAAK,CAAQ;QAHf,SAAI,GAAG,UAAU,CAAA;IAI9B,CAAC;CACL;AAND,0CAMC;AAED,MAAa,YAAY;IAEvB,YACkB,MAAc,EACd,IAAiB,EACjB,MAAe,EAC/B,WAAmB;QAHH,WAAM,GAAN,MAAM,CAAQ;QACd,SAAI,GAAJ,IAAI,CAAa;QACjB,WAAM,GAAN,MAAM,CAAS;QAG/B,IAAI,CAAC,WAAW,GAAG,IAAI,KAAK,CAAC,WAAW,CAAC,CAAA;IAC3C,CAAC;CACF;AAVD,oCAUC;AAED,MAAa,KAAK;IAChB,YACkB,IAAY,EACZ,OAAe,EACf,QAAgB,EAChB,UAAkB,EAClB,YAAoB,EACpB,gBAAwB,EACxB,MAAY;QANZ,SAAI,GAAJ,IAAI,CAAQ;QACZ,YAAO,GAAP,OAAO,CAAQ;QACf,aAAQ,GAAR,QAAQ,CAAQ;QAChB,eAAU,GAAV,UAAU,CAAQ;QAClB,iBAAY,GAAZ,YAAY,CAAQ;QACpB,qBAAgB,GAAhB,gBAAgB,CAAQ;QACxB,WAAM,GAAN,MAAM,CAAM;IAC3B,CAAC;CACL;AAVD,sBAUC;AAED,MAAa,qBAAqB;IAGhC,YACkB,MAAc,EACd,UAAkB;QADlB,WAAM,GAAN,MAAM,CAAQ;QACd,eAAU,GAAV,UAAU,CAAQ;QAJpB,SAAI,GAAgB,gBAAgB,CAAA;QAMlD,IAAI,CAAC,MAAM,GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,UAAU,CAAC,CAAA;IAC1C,CAAC;CACF;AATD,sDASC;AAED,MAAa,2BAA2B;IAGtC,YACkB,MAAc,EACd,cAAsB;QADtB,WAAM,GAAN,MAAM,CAAQ;QACd,mBAAc,GAAd,cAAc,CAAQ;QAJxB,SAAI,GAAgB,sBAAsB,CAAA;QAMxD,IAAI,CAAC,WAAW,GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,cAAc,CAAC,CAAA;IACnD,CAAC;CACF;AATD,kEASC;AAED,MAAa,sBAAsB;IAEjC,YACkB,MAAc,EACd,aAAqB,EACrB,cAAsB;QAFtB,WAAM,GAAN,MAAM,CAAQ;QACd,kBAAa,GAAb,aAAa,CAAQ;QACrB,mBAAc,GAAd,cAAc,CAAQ;QAJxB,SAAI,GAAgB,iBAAiB,CAAA;IAKlD,CAAC;CACL;AAPD,wDAOC;AAED,MAAa,yBAAyB;IAEpC,YACkB,MAAc,EACd,IAAY;QADZ,WAAM,GAAN,MAAM,CAAQ;QACd,SAAI,GAAJ,IAAI,CAAQ;QAHd,SAAI,GAAgB,2BAA2B,CAAA;IAI5D,CAAC;CACL;AAND,8DAMC;AAED,MAAa,qBAAqB;IAEhC,YACkB,MAAc,EACd,SAAiB,EACjB,SAAiB;QAFjB,WAAM,GAAN,MAAM,CAAQ;QACd,cAAS,GAAT,SAAS,CAAQ;QACjB,cAAS,GAAT,SAAS,CAAQ;QAJnB,SAAI,GAAgB,gBAAgB,CAAA;IAKjD,CAAC;CACL;AAPD,sDAOC;AAED,MAAa,2BAA2B;IAEtC,YACkB,MAAc,EACd,SAAiB,EACjB,OAAe,EACf,OAAe;QAHf,WAAM,GAAN,MAAM,CAAQ;QACd,cAAS,GAAT,SAAS,CAAQ;QACjB,YAAO,GAAP,OAAO,CAAQ;QACf,YAAO,GAAP,OAAO,CAAQ;QALjB,SAAI,GAAgB,cAAc,CAAA;IAM/C,CAAC;CACL;AARD,kEAQC;AAED,MAAa,oBAAoB;IAE/B,YACkB,MAAc,EACd,MAAc;QADd,WAAM,GAAN,MAAM,CAAQ;QACd,WAAM,GAAN,MAAM,CAAQ;QAHhB,SAAI,GAAgB,eAAe,CAAA;IAIhD,CAAC;CACL;AAND,oDAMC;AAED,MAAa,sBAAsB;IAEjC,YACkB,MAAc,EACd,IAAY;QADZ,WAAM,GAAN,MAAM,CAAQ;QACd,SAAI,GAAJ,IAAI,CAAQ;QAHd,SAAI,GAAgB,iBAAiB,CAAA;IAIlD,CAAC;CACL;AAND,wDAMC;AAED,MAAa,cAAc;IAGzB,YACS,MAAc,EACd,MAAa;QADb,WAAM,GAAN,MAAM,CAAQ;QACd,WAAM,GAAN,MAAM,CAAO;QAHN,SAAI,GAAgB,SAAS,CAAA;QAK3C,IAAI,CAAC,UAAU,GAAG,MAAM,CAAC,MAAM,CAAA;IACjC,CAAC;CACF;AATD,wCASC;AAED,MAAa,aAAa;IACxB,YACkB,MAAc,EACd,OAA2B;QAD3B,WAAM,GAAN,MAAM,CAAQ;QACd,YAAO,GAAP,OAAO,CAAoB;QAE7B,SAAI,GAAG,QAAQ,CAAA;IAD5B,CAAC;CAkBL;AAtBD,sCAsBC"}
|
1
lib/node/node_modules/pg-protocol/dist/outbound-serializer.test.d.ts
generated
vendored
Normal file
|
@@ -0,0 +1 @@
|
|||
export {};
|
248
lib/node/node_modules/pg-protocol/dist/outbound-serializer.test.js
generated
vendored
Normal file
|
@@ -0,0 +1,248 @@
|
|||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const assert_1 = __importDefault(require("assert"));
|
||||
const serializer_1 = require("./serializer");
|
||||
const buffer_list_1 = __importDefault(require("./testing/buffer-list"));
|
||||
describe('serializer', () => {
|
||||
it('builds startup message', function () {
|
||||
const actual = serializer_1.serialize.startup({
|
||||
user: 'brian',
|
||||
database: 'bang',
|
||||
});
|
||||
assert_1.default.deepEqual(actual, new buffer_list_1.default()
|
||||
.addInt16(3)
|
||||
.addInt16(0)
|
||||
.addCString('user')
|
||||
.addCString('brian')
|
||||
.addCString('database')
|
||||
.addCString('bang')
|
||||
.addCString('client_encoding')
|
||||
.addCString('UTF8')
|
||||
.addCString('')
|
||||
.join(true));
|
||||
});
|
||||
it('builds password message', function () {
|
||||
const actual = serializer_1.serialize.password('!');
|
||||
assert_1.default.deepEqual(actual, new buffer_list_1.default().addCString('!').join(true, 'p'));
|
||||
});
|
||||
it('builds request ssl message', function () {
|
||||
const actual = serializer_1.serialize.requestSsl();
|
||||
const expected = new buffer_list_1.default().addInt32(80877103).join(true);
|
||||
assert_1.default.deepEqual(actual, expected);
|
||||
});
|
||||
it('builds SASLInitialResponseMessage message', function () {
|
||||
const actual = serializer_1.serialize.sendSASLInitialResponseMessage('mech', 'data');
|
||||
assert_1.default.deepEqual(actual, new buffer_list_1.default().addCString('mech').addInt32(4).addString('data').join(true, 'p'));
|
||||
});
|
||||
it('builds SCRAMClientFinalMessage message', function () {
|
||||
const actual = serializer_1.serialize.sendSCRAMClientFinalMessage('data');
|
||||
assert_1.default.deepEqual(actual, new buffer_list_1.default().addString('data').join(true, 'p'));
|
||||
});
|
||||
it('builds query message', function () {
|
||||
var txt = 'select * from boom';
|
||||
const actual = serializer_1.serialize.query(txt);
|
||||
assert_1.default.deepEqual(actual, new buffer_list_1.default().addCString(txt).join(true, 'Q'));
|
||||
});
|
||||
describe('parse message', () => {
|
||||
it('builds parse message', function () {
|
||||
const actual = serializer_1.serialize.parse({ text: '!' });
|
||||
var expected = new buffer_list_1.default().addCString('').addCString('!').addInt16(0).join(true, 'P');
|
||||
assert_1.default.deepEqual(actual, expected);
|
||||
});
|
||||
it('builds parse message with named query', function () {
|
||||
const actual = serializer_1.serialize.parse({
|
||||
name: 'boom',
|
||||
text: 'select * from boom',
|
||||
types: [],
|
||||
});
|
||||
var expected = new buffer_list_1.default().addCString('boom').addCString('select * from boom').addInt16(0).join(true, 'P');
|
||||
assert_1.default.deepEqual(actual, expected);
|
||||
});
|
||||
it('with multiple parameters', function () {
|
||||
const actual = serializer_1.serialize.parse({
|
||||
name: 'force',
|
||||
text: 'select * from bang where name = $1',
|
||||
types: [1, 2, 3, 4],
|
||||
});
|
||||
var expected = new buffer_list_1.default()
|
||||
.addCString('force')
|
||||
.addCString('select * from bang where name = $1')
|
||||
.addInt16(4)
|
||||
.addInt32(1)
|
||||
.addInt32(2)
|
||||
.addInt32(3)
|
||||
.addInt32(4)
|
||||
.join(true, 'P');
|
||||
assert_1.default.deepEqual(actual, expected);
|
||||
});
|
||||
});
|
||||
describe('bind messages', function () {
|
||||
it('with no values', function () {
|
||||
const actual = serializer_1.serialize.bind();
|
||||
var expectedBuffer = new buffer_list_1.default()
|
||||
.addCString('')
|
||||
.addCString('')
|
||||
.addInt16(0)
|
||||
.addInt16(0)
|
||||
.addInt16(0)
|
||||
.join(true, 'B');
|
||||
assert_1.default.deepEqual(actual, expectedBuffer);
|
||||
});
|
||||
it('with named statement, portal, and values', function () {
|
||||
const actual = serializer_1.serialize.bind({
|
||||
portal: 'bang',
|
||||
statement: 'woo',
|
||||
values: ['1', 'hi', null, 'zing'],
|
||||
});
|
||||
var expectedBuffer = new buffer_list_1.default()
|
||||
.addCString('bang') // portal name
|
||||
.addCString('woo') // statement name
|
||||
.addInt16(4)
|
||||
.addInt16(0)
|
||||
.addInt16(0)
|
||||
.addInt16(0)
|
||||
.addInt16(0)
|
||||
.addInt16(4)
|
||||
.addInt32(1)
|
||||
.add(Buffer.from('1'))
|
||||
.addInt32(2)
|
||||
.add(Buffer.from('hi'))
|
||||
.addInt32(-1)
|
||||
.addInt32(4)
|
||||
.add(Buffer.from('zing'))
|
||||
.addInt16(0)
|
||||
.join(true, 'B');
|
||||
assert_1.default.deepEqual(actual, expectedBuffer);
|
||||
});
|
||||
});
|
||||
it('with custom valueMapper', function () {
|
||||
const actual = serializer_1.serialize.bind({
|
||||
portal: 'bang',
|
||||
statement: 'woo',
|
||||
values: ['1', 'hi', null, 'zing'],
|
||||
valueMapper: () => null,
|
||||
});
|
||||
var expectedBuffer = new buffer_list_1.default()
|
||||
.addCString('bang') // portal name
|
||||
.addCString('woo') // statement name
|
||||
.addInt16(4)
|
||||
.addInt16(0)
|
||||
.addInt16(0)
|
||||
.addInt16(0)
|
||||
.addInt16(0)
|
||||
.addInt16(4)
|
||||
.addInt32(-1)
|
||||
.addInt32(-1)
|
||||
.addInt32(-1)
|
||||
.addInt32(-1)
|
||||
.addInt16(0)
|
||||
.join(true, 'B');
|
||||
assert_1.default.deepEqual(actual, expectedBuffer);
|
||||
});
|
||||
it('with named statement, portal, and buffer value', function () {
|
||||
const actual = serializer_1.serialize.bind({
|
||||
portal: 'bang',
|
||||
statement: 'woo',
|
||||
values: ['1', 'hi', null, Buffer.from('zing', 'utf8')],
|
||||
});
|
||||
var expectedBuffer = new buffer_list_1.default()
|
||||
.addCString('bang') // portal name
|
||||
.addCString('woo') // statement name
|
||||
.addInt16(4) // value count
|
||||
.addInt16(0) // string
|
||||
.addInt16(0) // string
|
||||
.addInt16(0) // string
|
||||
.addInt16(1) // binary
|
||||
.addInt16(4)
|
||||
.addInt32(1)
|
||||
.add(Buffer.from('1'))
|
||||
.addInt32(2)
|
||||
.add(Buffer.from('hi'))
|
||||
.addInt32(-1)
|
||||
.addInt32(4)
|
||||
.add(Buffer.from('zing', 'utf-8'))
|
||||
.addInt16(0)
|
||||
.join(true, 'B');
|
||||
assert_1.default.deepEqual(actual, expectedBuffer);
|
||||
});
|
||||
describe('builds execute message', function () {
|
||||
it('for unnamed portal with no row limit', function () {
|
||||
const actual = serializer_1.serialize.execute();
|
||||
var expectedBuffer = new buffer_list_1.default().addCString('').addInt32(0).join(true, 'E');
|
||||
assert_1.default.deepEqual(actual, expectedBuffer);
|
||||
});
|
||||
it('for named portal with row limit', function () {
|
||||
const actual = serializer_1.serialize.execute({
|
||||
portal: 'my favorite portal',
|
||||
rows: 100,
|
||||
});
|
||||
var expectedBuffer = new buffer_list_1.default().addCString('my favorite portal').addInt32(100).join(true, 'E');
|
||||
assert_1.default.deepEqual(actual, expectedBuffer);
|
||||
});
|
||||
});
|
||||
it('builds flush command', function () {
|
||||
const actual = serializer_1.serialize.flush();
|
||||
var expected = new buffer_list_1.default().join(true, 'H');
|
||||
assert_1.default.deepEqual(actual, expected);
|
||||
});
|
||||
it('builds sync command', function () {
|
||||
const actual = serializer_1.serialize.sync();
|
||||
var expected = new buffer_list_1.default().join(true, 'S');
|
||||
assert_1.default.deepEqual(actual, expected);
|
||||
});
|
||||
it('builds end command', function () {
|
||||
const actual = serializer_1.serialize.end();
|
||||
var expected = Buffer.from([0x58, 0, 0, 0, 4]);
|
||||
assert_1.default.deepEqual(actual, expected);
|
||||
});
|
||||
describe('builds describe command', function () {
|
||||
it('describe statement', function () {
|
||||
const actual = serializer_1.serialize.describe({ type: 'S', name: 'bang' });
|
||||
var expected = new buffer_list_1.default().addChar('S').addCString('bang').join(true, 'D');
|
||||
assert_1.default.deepEqual(actual, expected);
|
||||
});
|
||||
it('describe unnamed portal', function () {
|
||||
const actual = serializer_1.serialize.describe({ type: 'P' });
|
||||
var expected = new buffer_list_1.default().addChar('P').addCString('').join(true, 'D');
|
||||
assert_1.default.deepEqual(actual, expected);
|
||||
});
|
||||
});
|
||||
describe('builds close command', function () {
|
||||
it('close statement', function () {
|
||||
const actual = serializer_1.serialize.close({ type: 'S', name: 'bang' });
|
||||
var expected = new buffer_list_1.default().addChar('S').addCString('bang').join(true, 'C');
|
||||
assert_1.default.deepEqual(actual, expected);
|
||||
});
|
||||
it('close unnamed portal', function () {
|
||||
const actual = serializer_1.serialize.close({ type: 'P' });
|
||||
var expected = new buffer_list_1.default().addChar('P').addCString('').join(true, 'C');
|
||||
assert_1.default.deepEqual(actual, expected);
|
||||
});
|
||||
});
|
||||
describe('copy messages', function () {
|
||||
it('builds copyFromChunk', () => {
|
||||
const actual = serializer_1.serialize.copyData(Buffer.from([1, 2, 3]));
|
||||
const expected = new buffer_list_1.default().add(Buffer.from([1, 2, 3])).join(true, 'd');
|
||||
assert_1.default.deepEqual(actual, expected);
|
||||
});
|
||||
it('builds copy fail', () => {
|
||||
const actual = serializer_1.serialize.copyFail('err!');
|
||||
const expected = new buffer_list_1.default().addCString('err!').join(true, 'f');
|
||||
assert_1.default.deepEqual(actual, expected);
|
||||
});
|
||||
it('builds copy done', () => {
|
||||
const actual = serializer_1.serialize.copyDone();
|
||||
const expected = new buffer_list_1.default().join(true, 'c');
|
||||
assert_1.default.deepEqual(actual, expected);
|
||||
});
|
||||
});
|
||||
it('builds cancel message', () => {
|
||||
const actual = serializer_1.serialize.cancel(3, 4);
|
||||
const expected = new buffer_list_1.default().addInt16(1234).addInt16(5678).addInt32(3).addInt32(4).join(true);
|
||||
assert_1.default.deepEqual(actual, expected);
|
||||
});
|
||||
});
|
||||
//# sourceMappingURL=outbound-serializer.test.js.map
|
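A minimal sketch of the framing these tests assert (assuming only the package's exported serialize; the sample query is invented for illustration): a query message is a 'Q' code byte, an int32 length counting everything after the code, and a NUL-terminated query string.

import { serialize } from 'pg-protocol'

const msg = serialize.query('select 1')
console.log(msg[0] === 0x51)             // 'Q' code byte
console.log(msg.readInt32BE(1))          // 13 = 4 length bytes + 8 text bytes + 1 NUL
console.log(msg.slice(5, -1).toString()) // 'select 1'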
1
lib/node/node_modules/pg-protocol/dist/outbound-serializer.test.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
39
lib/node/node_modules/pg-protocol/dist/parser.d.ts
generated
vendored
Normal file
|
@@ -0,0 +1,39 @@
|
|||
/// <reference types="node" />
|
||||
/// <reference types="node" />
|
||||
import { TransformOptions } from 'stream';
|
||||
import { Mode, BackendMessage } from './messages';
|
||||
export declare type Packet = {
|
||||
code: number;
|
||||
packet: Buffer;
|
||||
};
|
||||
declare type StreamOptions = TransformOptions & {
|
||||
mode: Mode;
|
||||
};
|
||||
export declare type MessageCallback = (msg: BackendMessage) => void;
|
||||
export declare class Parser {
|
||||
private buffer;
|
||||
private bufferLength;
|
||||
private bufferOffset;
|
||||
private reader;
|
||||
private mode;
|
||||
constructor(opts?: StreamOptions);
|
||||
parse(buffer: Buffer, callback: MessageCallback): void;
|
||||
private mergeBuffer;
|
||||
private handlePacket;
|
||||
private parseReadyForQueryMessage;
|
||||
private parseCommandCompleteMessage;
|
||||
private parseCopyData;
|
||||
private parseCopyInMessage;
|
||||
private parseCopyOutMessage;
|
||||
private parseCopyMessage;
|
||||
private parseNotificationMessage;
|
||||
private parseRowDescriptionMessage;
|
||||
private parseField;
|
||||
private parseParameterDescriptionMessage;
|
||||
private parseDataRowMessage;
|
||||
private parseParameterStatusMessage;
|
||||
private parseBackendKeyData;
|
||||
parseAuthenticationResponse(offset: number, length: number, bytes: Buffer): any;
|
||||
private parseErrorMessage;
|
||||
}
|
||||
export {};
|
304
lib/node/node_modules/pg-protocol/dist/parser.js
generated
vendored
Normal file
|
@@ -0,0 +1,304 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.Parser = void 0;
|
||||
const messages_1 = require("./messages");
|
||||
const buffer_reader_1 = require("./buffer-reader");
|
||||
// every message is prefixed with a single byte
|
||||
const CODE_LENGTH = 1;
|
||||
// every message has an int32 length which includes itself but does
|
||||
// NOT include the code in the length
|
||||
const LEN_LENGTH = 4;
|
||||
const HEADER_LENGTH = CODE_LENGTH + LEN_LENGTH;
|
||||
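// For illustration (an assumed example, not bytes from this repo): a
// ReadyForQuery packet arrives as [0x5a, 0, 0, 0, 5, 0x49] - code 'Z',
// int32 length 5 (the four length bytes plus a one-byte status), then
// status 'I'; HEADER_LENGTH spans those first five bytes.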
const emptyBuffer = Buffer.allocUnsafe(0);
|
||||
class Parser {
|
||||
constructor(opts) {
|
||||
this.buffer = emptyBuffer;
|
||||
this.bufferLength = 0;
|
||||
this.bufferOffset = 0;
|
||||
this.reader = new buffer_reader_1.BufferReader();
|
||||
if ((opts === null || opts === void 0 ? void 0 : opts.mode) === 'binary') {
|
||||
throw new Error('Binary mode not supported yet');
|
||||
}
|
||||
this.mode = (opts === null || opts === void 0 ? void 0 : opts.mode) || 'text';
|
||||
}
|
||||
parse(buffer, callback) {
|
||||
this.mergeBuffer(buffer);
|
||||
const bufferFullLength = this.bufferOffset + this.bufferLength;
|
||||
let offset = this.bufferOffset;
|
||||
while (offset + HEADER_LENGTH <= bufferFullLength) {
|
||||
// code is 1 byte long - it identifies the message type
|
||||
const code = this.buffer[offset];
|
||||
// length is 1 Uint32BE - it is the length of the message EXCLUDING the code
|
||||
const length = this.buffer.readUInt32BE(offset + CODE_LENGTH);
|
||||
const fullMessageLength = CODE_LENGTH + length;
|
||||
if (fullMessageLength + offset <= bufferFullLength) {
|
||||
const message = this.handlePacket(offset + HEADER_LENGTH, code, length, this.buffer);
|
||||
callback(message);
|
||||
offset += fullMessageLength;
|
||||
}
|
||||
else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (offset === bufferFullLength) {
|
||||
// No more use for the buffer
|
||||
this.buffer = emptyBuffer;
|
||||
this.bufferLength = 0;
|
||||
this.bufferOffset = 0;
|
||||
}
|
||||
else {
|
||||
// Adjust the cursors of remainingBuffer
|
||||
this.bufferLength = bufferFullLength - offset;
|
||||
this.bufferOffset = offset;
|
||||
}
|
||||
}
|
||||
mergeBuffer(buffer) {
|
||||
if (this.bufferLength > 0) {
|
||||
const newLength = this.bufferLength + buffer.byteLength;
|
||||
const newFullLength = newLength + this.bufferOffset;
|
||||
if (newFullLength > this.buffer.byteLength) {
|
||||
// We can't concat the new buffer with the remaining one
|
||||
let newBuffer;
|
||||
if (newLength <= this.buffer.byteLength && this.bufferOffset >= this.bufferLength) {
|
||||
// We can move the relevant part to the beginning of the buffer instead of allocating a new buffer
|
||||
newBuffer = this.buffer;
|
||||
}
|
||||
else {
|
||||
// Allocate a new larger buffer
|
||||
let newBufferLength = this.buffer.byteLength * 2;
|
||||
while (newLength >= newBufferLength) {
|
||||
newBufferLength *= 2;
|
||||
}
|
||||
newBuffer = Buffer.allocUnsafe(newBufferLength);
|
||||
}
|
||||
// Move the remaining buffer to the new one
|
||||
this.buffer.copy(newBuffer, 0, this.bufferOffset, this.bufferOffset + this.bufferLength);
|
||||
this.buffer = newBuffer;
|
||||
this.bufferOffset = 0;
|
||||
}
|
||||
// Concat the new buffer with the remaining one
|
||||
buffer.copy(this.buffer, this.bufferOffset + this.bufferLength);
|
||||
this.bufferLength = newLength;
|
||||
}
|
||||
else {
|
||||
this.buffer = buffer;
|
||||
this.bufferOffset = 0;
|
||||
this.bufferLength = buffer.byteLength;
|
||||
}
|
||||
}
|
||||
handlePacket(offset, code, length, bytes) {
|
||||
switch (code) {
|
||||
case 50 /* MessageCodes.BindComplete */:
|
||||
return messages_1.bindComplete;
|
||||
case 49 /* MessageCodes.ParseComplete */:
|
||||
return messages_1.parseComplete;
|
||||
case 51 /* MessageCodes.CloseComplete */:
|
||||
return messages_1.closeComplete;
|
||||
case 110 /* MessageCodes.NoData */:
|
||||
return messages_1.noData;
|
||||
case 115 /* MessageCodes.PortalSuspended */:
|
||||
return messages_1.portalSuspended;
|
||||
case 99 /* MessageCodes.CopyDone */:
|
||||
return messages_1.copyDone;
|
||||
case 87 /* MessageCodes.ReplicationStart */:
|
||||
return messages_1.replicationStart;
|
||||
case 73 /* MessageCodes.EmptyQuery */:
|
||||
return messages_1.emptyQuery;
|
||||
case 68 /* MessageCodes.DataRow */:
|
||||
return this.parseDataRowMessage(offset, length, bytes);
|
||||
case 67 /* MessageCodes.CommandComplete */:
|
||||
return this.parseCommandCompleteMessage(offset, length, bytes);
|
||||
case 90 /* MessageCodes.ReadyForQuery */:
|
||||
return this.parseReadyForQueryMessage(offset, length, bytes);
|
||||
case 65 /* MessageCodes.NotificationResponse */:
|
||||
return this.parseNotificationMessage(offset, length, bytes);
|
||||
case 82 /* MessageCodes.AuthenticationResponse */:
|
||||
return this.parseAuthenticationResponse(offset, length, bytes);
|
||||
case 83 /* MessageCodes.ParameterStatus */:
|
||||
return this.parseParameterStatusMessage(offset, length, bytes);
|
||||
case 75 /* MessageCodes.BackendKeyData */:
|
||||
return this.parseBackendKeyData(offset, length, bytes);
|
||||
case 69 /* MessageCodes.ErrorMessage */:
|
||||
return this.parseErrorMessage(offset, length, bytes, 'error');
|
||||
case 78 /* MessageCodes.NoticeMessage */:
|
||||
return this.parseErrorMessage(offset, length, bytes, 'notice');
|
||||
case 84 /* MessageCodes.RowDescriptionMessage */:
|
||||
return this.parseRowDescriptionMessage(offset, length, bytes);
|
||||
case 116 /* MessageCodes.ParameterDescriptionMessage */:
|
||||
return this.parseParameterDescriptionMessage(offset, length, bytes);
|
||||
case 71 /* MessageCodes.CopyIn */:
|
||||
return this.parseCopyInMessage(offset, length, bytes);
|
||||
case 72 /* MessageCodes.CopyOut */:
|
||||
return this.parseCopyOutMessage(offset, length, bytes);
|
||||
case 100 /* MessageCodes.CopyData */:
|
||||
return this.parseCopyData(offset, length, bytes);
|
||||
default:
|
||||
return new messages_1.DatabaseError('received invalid response: ' + code.toString(16), length, 'error');
|
||||
}
|
||||
}
|
||||
parseReadyForQueryMessage(offset, length, bytes) {
|
||||
this.reader.setBuffer(offset, bytes);
|
||||
const status = this.reader.string(1);
|
||||
return new messages_1.ReadyForQueryMessage(length, status);
|
||||
}
|
||||
parseCommandCompleteMessage(offset, length, bytes) {
|
||||
this.reader.setBuffer(offset, bytes);
|
||||
const text = this.reader.cstring();
|
||||
return new messages_1.CommandCompleteMessage(length, text);
|
||||
}
|
||||
parseCopyData(offset, length, bytes) {
|
||||
const chunk = bytes.slice(offset, offset + (length - 4));
|
||||
return new messages_1.CopyDataMessage(length, chunk);
|
||||
}
|
||||
parseCopyInMessage(offset, length, bytes) {
|
||||
return this.parseCopyMessage(offset, length, bytes, 'copyInResponse');
|
||||
}
|
||||
parseCopyOutMessage(offset, length, bytes) {
|
||||
return this.parseCopyMessage(offset, length, bytes, 'copyOutResponse');
|
||||
}
|
||||
parseCopyMessage(offset, length, bytes, messageName) {
|
||||
this.reader.setBuffer(offset, bytes);
|
||||
const isBinary = this.reader.byte() !== 0;
|
||||
const columnCount = this.reader.int16();
|
||||
const message = new messages_1.CopyResponse(length, messageName, isBinary, columnCount);
|
||||
for (let i = 0; i < columnCount; i++) {
|
||||
message.columnTypes[i] = this.reader.int16();
|
||||
}
|
||||
return message;
|
||||
}
|
||||
parseNotificationMessage(offset, length, bytes) {
|
||||
this.reader.setBuffer(offset, bytes);
|
||||
const processId = this.reader.int32();
|
||||
const channel = this.reader.cstring();
|
||||
const payload = this.reader.cstring();
|
||||
return new messages_1.NotificationResponseMessage(length, processId, channel, payload);
|
||||
}
|
||||
parseRowDescriptionMessage(offset, length, bytes) {
|
||||
this.reader.setBuffer(offset, bytes);
|
||||
const fieldCount = this.reader.int16();
|
||||
const message = new messages_1.RowDescriptionMessage(length, fieldCount);
|
||||
for (let i = 0; i < fieldCount; i++) {
|
||||
message.fields[i] = this.parseField();
|
||||
}
|
||||
return message;
|
||||
}
|
||||
parseField() {
|
||||
const name = this.reader.cstring();
|
||||
const tableID = this.reader.int32();
|
||||
const columnID = this.reader.int16();
|
||||
const dataTypeID = this.reader.int32();
|
||||
const dataTypeSize = this.reader.int16();
|
||||
const dataTypeModifier = this.reader.int32();
|
||||
const mode = this.reader.int16() === 0 ? 'text' : 'binary';
|
||||
return new messages_1.Field(name, tableID, columnID, dataTypeID, dataTypeSize, dataTypeModifier, mode);
|
||||
}
|
||||
parseParameterDescriptionMessage(offset, length, bytes) {
|
||||
this.reader.setBuffer(offset, bytes);
|
||||
const parameterCount = this.reader.int16();
|
||||
const message = new messages_1.ParameterDescriptionMessage(length, parameterCount);
|
||||
for (let i = 0; i < parameterCount; i++) {
|
||||
message.dataTypeIDs[i] = this.reader.int32();
|
||||
}
|
||||
return message;
|
||||
}
|
||||
parseDataRowMessage(offset, length, bytes) {
|
||||
this.reader.setBuffer(offset, bytes);
|
||||
const fieldCount = this.reader.int16();
|
||||
const fields = new Array(fieldCount);
|
||||
for (let i = 0; i < fieldCount; i++) {
|
||||
const len = this.reader.int32();
|
||||
// a -1 for length means the value of the field is null
|
||||
fields[i] = len === -1 ? null : this.reader.string(len);
|
||||
}
|
||||
return new messages_1.DataRowMessage(length, fields);
|
||||
}
|
||||
parseParameterStatusMessage(offset, length, bytes) {
|
||||
this.reader.setBuffer(offset, bytes);
|
||||
const name = this.reader.cstring();
|
||||
const value = this.reader.cstring();
|
||||
return new messages_1.ParameterStatusMessage(length, name, value);
|
||||
}
|
||||
parseBackendKeyData(offset, length, bytes) {
|
||||
this.reader.setBuffer(offset, bytes);
|
||||
const processID = this.reader.int32();
|
||||
const secretKey = this.reader.int32();
|
||||
return new messages_1.BackendKeyDataMessage(length, processID, secretKey);
|
||||
}
|
||||
parseAuthenticationResponse(offset, length, bytes) {
|
||||
this.reader.setBuffer(offset, bytes);
|
||||
const code = this.reader.int32();
|
||||
// TODO(bmc): maybe better types here
|
||||
const message = {
|
||||
name: 'authenticationOk',
|
||||
length,
|
||||
};
|
||||
switch (code) {
|
||||
case 0: // AuthenticationOk
|
||||
break;
|
||||
case 3: // AuthenticationCleartextPassword
|
||||
if (message.length === 8) {
|
||||
message.name = 'authenticationCleartextPassword';
|
||||
}
|
||||
break;
|
||||
case 5: // AuthenticationMD5Password
|
||||
if (message.length === 12) {
|
||||
message.name = 'authenticationMD5Password';
|
||||
const salt = this.reader.bytes(4);
|
||||
return new messages_1.AuthenticationMD5Password(length, salt);
|
||||
}
|
||||
break;
|
||||
case 10: // AuthenticationSASL
|
||||
message.name = 'authenticationSASL';
|
||||
message.mechanisms = [];
|
||||
let mechanism;
|
||||
do {
|
||||
mechanism = this.reader.cstring();
|
||||
if (mechanism) {
|
||||
message.mechanisms.push(mechanism);
|
||||
}
|
||||
} while (mechanism);
|
||||
break;
|
||||
case 11: // AuthenticationSASLContinue
|
||||
message.name = 'authenticationSASLContinue';
|
||||
message.data = this.reader.string(length - 8);
|
||||
break;
|
||||
case 12: // AuthenticationSASLFinal
|
||||
message.name = 'authenticationSASLFinal';
|
||||
message.data = this.reader.string(length - 8);
|
||||
break;
|
||||
default:
|
||||
throw new Error('Unknown authenticationOk message type ' + code);
|
||||
}
|
||||
return message;
|
||||
}
|
||||
parseErrorMessage(offset, length, bytes, name) {
|
||||
this.reader.setBuffer(offset, bytes);
|
||||
const fields = {};
|
||||
let fieldType = this.reader.string(1);
|
||||
while (fieldType !== '\0') {
|
||||
fields[fieldType] = this.reader.cstring();
|
||||
fieldType = this.reader.string(1);
|
||||
}
|
||||
const messageValue = fields.M;
|
||||
const message = name === 'notice' ? new messages_1.NoticeMessage(length, messageValue) : new messages_1.DatabaseError(messageValue, length, name);
|
||||
message.severity = fields.S;
|
||||
message.code = fields.C;
|
||||
message.detail = fields.D;
|
||||
message.hint = fields.H;
|
||||
message.position = fields.P;
|
||||
message.internalPosition = fields.p;
|
||||
message.internalQuery = fields.q;
|
||||
message.where = fields.W;
|
||||
message.schema = fields.s;
|
||||
message.table = fields.t;
|
||||
message.column = fields.c;
|
||||
message.dataType = fields.d;
|
||||
message.constraint = fields.n;
|
||||
message.file = fields.F;
|
||||
message.line = fields.L;
|
||||
message.routine = fields.R;
|
||||
return message;
|
||||
}
|
||||
}
|
||||
exports.Parser = Parser;
|
||||
//# sourceMappingURL=parser.js.map
|
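A minimal usage sketch of the parser above (the import path and sample bytes are illustrative, not taken from this commit):

import { Parser } from 'pg-protocol/dist/parser'

const parser = new Parser()
// 'Z' | int32 length 5 | status 'I' - a complete ReadyForQuery packet
parser.parse(Buffer.from([0x5a, 0x00, 0x00, 0x00, 0x05, 0x49]), (msg) => {
  console.log(msg.name) // 'readyForQuery'
})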
1
lib/node/node_modules/pg-protocol/dist/parser.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
42
lib/node/node_modules/pg-protocol/dist/serializer.d.ts
generated
vendored
Normal file
|
@@ -0,0 +1,42 @@
|
|||
declare type ParseOpts = {
|
||||
name?: string;
|
||||
types?: number[];
|
||||
text: string;
|
||||
};
|
||||
declare type ValueMapper = (param: any, index: number) => any;
|
||||
declare type BindOpts = {
|
||||
portal?: string;
|
||||
binary?: boolean;
|
||||
statement?: string;
|
||||
values?: any[];
|
||||
valueMapper?: ValueMapper;
|
||||
};
|
||||
declare type ExecOpts = {
|
||||
portal?: string;
|
||||
rows?: number;
|
||||
};
|
||||
declare type PortalOpts = {
|
||||
type: 'S' | 'P';
|
||||
name?: string;
|
||||
};
|
||||
declare const serialize: {
|
||||
startup: (opts: Record<string, string>) => Buffer;
|
||||
password: (password: string) => Buffer;
|
||||
requestSsl: () => Buffer;
|
||||
sendSASLInitialResponseMessage: (mechanism: string, initialResponse: string) => Buffer;
|
||||
sendSCRAMClientFinalMessage: (additionalData: string) => Buffer;
|
||||
query: (text: string) => Buffer;
|
||||
parse: (query: ParseOpts) => Buffer;
|
||||
bind: (config?: BindOpts) => Buffer;
|
||||
execute: (config?: ExecOpts) => Buffer;
|
||||
describe: (msg: PortalOpts) => Buffer;
|
||||
close: (msg: PortalOpts) => Buffer;
|
||||
flush: () => Buffer;
|
||||
sync: () => Buffer;
|
||||
end: () => Buffer;
|
||||
copyData: (chunk: Buffer) => Buffer;
|
||||
copyDone: () => Buffer;
|
||||
copyFail: (message: string) => Buffer;
|
||||
cancel: (processID: number, secretKey: number) => Buffer;
|
||||
};
|
||||
export { serialize };
|
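These declarations cover the full client-to-server surface. A hedged sketch of the extended-query sequence they enable, assuming a connected net.Socket named conn (the wrapper function is hypothetical):

import { serialize } from 'pg-protocol'
import type { Socket } from 'net'

function sendExtendedQuery(conn: Socket, text: string, values: string[]) {
  conn.write(serialize.parse({ text }))         // 'P': parse the unnamed statement
  conn.write(serialize.bind({ values }))        // 'B': bind the unnamed portal
  conn.write(serialize.describe({ type: 'P' })) // 'D': describe that portal
  conn.write(serialize.execute())               // 'E': run to completion
  conn.write(serialize.sync())                  // 'S': end the implicit transaction
}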
189
lib/node/node_modules/pg-protocol/dist/serializer.js
generated
vendored
Normal file
|
@@ -0,0 +1,189 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.serialize = void 0;
|
||||
const buffer_writer_1 = require("./buffer-writer");
|
||||
const writer = new buffer_writer_1.Writer();
|
||||
const startup = (opts) => {
|
||||
// protocol version
|
||||
writer.addInt16(3).addInt16(0);
|
||||
for (const key of Object.keys(opts)) {
|
||||
writer.addCString(key).addCString(opts[key]);
|
||||
}
|
||||
writer.addCString('client_encoding').addCString('UTF8');
|
||||
var bodyBuffer = writer.addCString('').flush();
|
||||
// this message is sent without a code
|
||||
var length = bodyBuffer.length + 4;
|
||||
return new buffer_writer_1.Writer().addInt32(length).add(bodyBuffer).flush();
|
||||
};
|
||||
const requestSsl = () => {
|
||||
const response = Buffer.allocUnsafe(8);
|
||||
response.writeInt32BE(8, 0);
|
||||
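// 80877103 is the protocol-defined SSLRequest code: 1234 << 16 | 5679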
response.writeInt32BE(80877103, 4);
|
||||
return response;
|
||||
};
|
||||
const password = (password) => {
|
||||
return writer.addCString(password).flush(112 /* code.startup */);
|
||||
};
|
||||
const sendSASLInitialResponseMessage = function (mechanism, initialResponse) {
|
||||
// 0x70 = 'p'
|
||||
writer.addCString(mechanism).addInt32(Buffer.byteLength(initialResponse)).addString(initialResponse);
|
||||
return writer.flush(112 /* code.startup */);
|
||||
};
|
||||
const sendSCRAMClientFinalMessage = function (additionalData) {
|
||||
return writer.addString(additionalData).flush(112 /* code.startup */);
|
||||
};
|
||||
const query = (text) => {
|
||||
return writer.addCString(text).flush(81 /* code.query */);
|
||||
};
|
||||
const emptyArray = [];
|
||||
const parse = (query) => {
|
||||
// expect something like this:
|
||||
// { name: 'queryName',
|
||||
// text: 'select * from blah',
|
||||
// types: ['int8', 'bool'] }
|
||||
// normalize missing query names to allow for null
|
||||
const name = query.name || '';
|
||||
if (name.length > 63) {
|
||||
/* eslint-disable no-console */
|
||||
console.error('Warning! Postgres only supports 63 characters for query names.');
|
||||
console.error('You supplied %s (%s)', name, name.length);
|
||||
console.error('This can cause conflicts and silent errors executing queries');
|
||||
/* eslint-enable no-console */
|
||||
}
|
||||
const types = query.types || emptyArray;
|
||||
var len = types.length;
|
||||
var buffer = writer
|
||||
.addCString(name) // name of query
|
||||
.addCString(query.text) // actual query text
|
||||
.addInt16(len);
|
||||
for (var i = 0; i < len; i++) {
|
||||
buffer.addInt32(types[i]);
|
||||
}
|
||||
return writer.flush(80 /* code.parse */);
|
||||
};
|
||||
const paramWriter = new buffer_writer_1.Writer();
|
||||
const writeValues = function (values, valueMapper) {
|
||||
for (let i = 0; i < values.length; i++) {
|
||||
const mappedVal = valueMapper ? valueMapper(values[i], i) : values[i];
|
||||
if (mappedVal == null) {
|
||||
// add the param type (string) to the writer
|
||||
writer.addInt16(0 /* ParamType.STRING */);
|
||||
// write -1 to the param writer to indicate null
|
||||
paramWriter.addInt32(-1);
|
||||
}
|
||||
else if (mappedVal instanceof Buffer) {
|
||||
// add the param type (binary) to the writer
|
||||
writer.addInt16(1 /* ParamType.BINARY */);
|
||||
// add the buffer to the param writer
|
||||
paramWriter.addInt32(mappedVal.length);
|
||||
paramWriter.add(mappedVal);
|
||||
}
|
||||
else {
|
||||
// add the param type (string) to the writer
|
||||
writer.addInt16(0 /* ParamType.STRING */);
|
||||
paramWriter.addInt32(Buffer.byteLength(mappedVal));
|
||||
paramWriter.addString(mappedVal);
|
||||
}
|
||||
}
|
||||
};
|
||||
const bind = (config = {}) => {
|
||||
// normalize config
|
||||
const portal = config.portal || '';
|
||||
const statement = config.statement || '';
|
||||
const binary = config.binary || false;
|
||||
const values = config.values || emptyArray;
|
||||
const len = values.length;
|
||||
writer.addCString(portal).addCString(statement);
|
||||
writer.addInt16(len);
|
||||
writeValues(values, config.valueMapper);
|
||||
writer.addInt16(len);
|
||||
writer.add(paramWriter.flush());
|
||||
// format code
|
||||
writer.addInt16(binary ? 1 /* ParamType.BINARY */ : 0 /* ParamType.STRING */);
|
||||
return writer.flush(66 /* code.bind */);
|
||||
};
|
||||
const emptyExecute = Buffer.from([69 /* code.execute */, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x00, 0x00]);
|
||||
const execute = (config) => {
|
||||
// this is the happy path for most queries
|
||||
if (!config || (!config.portal && !config.rows)) {
|
||||
return emptyExecute;
|
||||
}
|
||||
const portal = config.portal || '';
|
||||
const rows = config.rows || 0;
|
||||
const portalLength = Buffer.byteLength(portal);
|
||||
const len = 4 + portalLength + 1 + 4;
|
||||
// one extra byte for the code
|
||||
const buff = Buffer.allocUnsafe(1 + len);
|
||||
buff[0] = 69 /* code.execute */;
|
||||
buff.writeInt32BE(len, 1);
|
||||
buff.write(portal, 5, 'utf-8');
|
||||
buff[portalLength + 5] = 0; // null terminate portal cString
|
||||
buff.writeUInt32BE(rows, buff.length - 4);
|
||||
return buff;
|
||||
};
|
||||
const cancel = (processID, secretKey) => {
|
||||
const buffer = Buffer.allocUnsafe(16);
|
||||
buffer.writeInt32BE(16, 0);
|
||||
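// together these two int16s spell the CancelRequest code 80877102 (1234 << 16 | 5678)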
buffer.writeInt16BE(1234, 4);
|
||||
buffer.writeInt16BE(5678, 6);
|
||||
buffer.writeInt32BE(processID, 8);
|
||||
buffer.writeInt32BE(secretKey, 12);
|
||||
return buffer;
|
||||
};
|
||||
const cstringMessage = (code, string) => {
|
||||
const stringLen = Buffer.byteLength(string);
|
||||
const len = 4 + stringLen + 1;
|
||||
// one extra byte for the code
|
||||
const buffer = Buffer.allocUnsafe(1 + len);
|
||||
buffer[0] = code;
|
||||
buffer.writeInt32BE(len, 1);
|
||||
buffer.write(string, 5, 'utf-8');
|
||||
buffer[len] = 0; // null terminate cString
|
||||
return buffer;
|
||||
};
|
||||
const emptyDescribePortal = writer.addCString('P').flush(68 /* code.describe */);
|
||||
const emptyDescribeStatement = writer.addCString('S').flush(68 /* code.describe */);
|
||||
const describe = (msg) => {
|
||||
return msg.name
|
||||
? cstringMessage(68 /* code.describe */, `${msg.type}${msg.name || ''}`)
|
||||
: msg.type === 'P'
|
||||
? emptyDescribePortal
|
||||
: emptyDescribeStatement;
|
||||
};
|
||||
const close = (msg) => {
|
||||
const text = `${msg.type}${msg.name || ''}`;
|
||||
return cstringMessage(67 /* code.close */, text);
|
||||
};
|
||||
const copyData = (chunk) => {
|
||||
return writer.add(chunk).flush(100 /* code.copyFromChunk */);
|
||||
};
|
||||
const copyFail = (message) => {
|
||||
return cstringMessage(102 /* code.copyFail */, message);
|
||||
};
|
||||
const codeOnlyBuffer = (code) => Buffer.from([code, 0x00, 0x00, 0x00, 0x04]);
|
||||
const flushBuffer = codeOnlyBuffer(72 /* code.flush */);
|
||||
const syncBuffer = codeOnlyBuffer(83 /* code.sync */);
|
||||
const endBuffer = codeOnlyBuffer(88 /* code.end */);
|
||||
const copyDoneBuffer = codeOnlyBuffer(99 /* code.copyDone */);
|
||||
const serialize = {
|
||||
startup,
|
||||
password,
|
||||
requestSsl,
|
||||
sendSASLInitialResponseMessage,
|
||||
sendSCRAMClientFinalMessage,
|
||||
query,
|
||||
parse,
|
||||
bind,
|
||||
execute,
|
||||
describe,
|
||||
close,
|
||||
flush: () => flushBuffer,
|
||||
sync: () => syncBuffer,
|
||||
end: () => endBuffer,
|
||||
copyData,
|
||||
copyDone: () => copyDoneBuffer,
|
||||
copyFail,
|
||||
cancel,
|
||||
};
|
||||
exports.serialize = serialize;
|
||||
//# sourceMappingURL=serializer.js.map
|
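For concreteness, a worked example of the startup framing implemented above (illustrative values; startup is the one message sent without a code byte):

import { serialize } from 'pg-protocol'

const startup = serialize.startup({ user: 'brian' })
// int32 total length, int16 major (3), int16 minor (0), key/value cstrings, final NUL
console.log(startup.readInt32BE(0)) // 41 = 4 + 2 + 2 + 11 ('user\0brian\0') + 21 ('client_encoding\0UTF8\0') + 1
console.log(startup.readInt16BE(4)) // 3, i.e. protocol version 3.0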
1
lib/node/node_modules/pg-protocol/dist/serializer.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
35
lib/node/node_modules/pg-protocol/package.json
generated
vendored
Normal file
|
@@ -0,0 +1,35 @@
|
|||
{
|
||||
"name": "pg-protocol",
|
||||
"version": "1.6.1",
|
||||
"description": "The postgres client/server binary protocol, implemented in TypeScript",
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
"license": "MIT",
|
||||
"devDependencies": {
|
||||
"@types/chai": "^4.2.7",
|
||||
"@types/mocha": "^5.2.7",
|
||||
"@types/node": "^12.12.21",
|
||||
"chai": "^4.2.0",
|
||||
"chunky": "^0.0.0",
|
||||
"mocha": "^7.1.2",
|
||||
"ts-node": "^8.5.4",
|
||||
"typescript": "^4.0.3"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "mocha dist/**/*.test.js",
|
||||
"build": "tsc",
|
||||
"build:watch": "tsc --watch",
|
||||
"prepublish": "yarn build",
|
||||
"pretest": "yarn build"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/brianc/node-postgres.git",
|
||||
"directory": "packages/pg-protocol"
|
||||
},
|
||||
"files": [
|
||||
"/dist/*{js,ts,map}",
|
||||
"/src"
|
||||
],
|
||||
"gitHead": "b03c071d2d15af259e1e008e9628191c865e58fa"
|
||||
}
|
28
lib/node/node_modules/pg-protocol/src/b.ts
generated
vendored
Normal file
|
@@ -0,0 +1,28 @@
|
|||
// file for microbenchmarking
|
||||
|
||||
import { Writer } from './buffer-writer'
|
||||
import { serialize } from './index'
|
||||
import { BufferReader } from './buffer-reader'
|
||||
|
||||
const LOOPS = 1000
|
||||
let count = 0
|
||||
let start = Date.now()
|
||||
const writer = new Writer()
|
||||
|
||||
const reader = new BufferReader()
|
||||
const buffer = Buffer.from([33, 33, 33, 33, 33, 33, 33, 0])
|
||||
|
||||
const run = () => {
|
||||
if (count > LOOPS) {
|
||||
console.log(Date.now() - start)
|
||||
return
|
||||
}
|
||||
count++
|
||||
for (let i = 0; i < LOOPS; i++) {
|
||||
reader.setBuffer(0, buffer)
|
||||
reader.cstring()
|
||||
}
|
||||
setImmediate(run)
|
||||
}
|
||||
|
||||
run()
|
53
lib/node/node_modules/pg-protocol/src/buffer-reader.ts
generated
vendored
Normal file
|
@@ -0,0 +1,53 @@
|
|||
const emptyBuffer = Buffer.allocUnsafe(0)
|
||||
|
||||
export class BufferReader {
|
||||
private buffer: Buffer = emptyBuffer
|
||||
|
||||
// TODO(bmc): support non-utf8 encoding?
|
||||
private encoding: string = 'utf-8'
|
||||
|
||||
constructor(private offset: number = 0) {}
|
||||
|
||||
public setBuffer(offset: number, buffer: Buffer): void {
|
||||
this.offset = offset
|
||||
this.buffer = buffer
|
||||
}
|
||||
|
||||
public int16(): number {
|
||||
const result = this.buffer.readInt16BE(this.offset)
|
||||
this.offset += 2
|
||||
return result
|
||||
}
|
||||
|
||||
public byte(): number {
|
||||
const result = this.buffer[this.offset]
|
||||
this.offset++
|
||||
return result
|
||||
}
|
||||
|
||||
public int32(): number {
|
||||
const result = this.buffer.readInt32BE(this.offset)
|
||||
this.offset += 4
|
||||
return result
|
||||
}
|
||||
|
||||
public string(length: number): string {
|
||||
const result = this.buffer.toString(this.encoding, this.offset, this.offset + length)
|
||||
this.offset += length
|
||||
return result
|
||||
}
|
||||
|
||||
public cstring(): string {
|
||||
const start = this.offset
|
||||
let end = start
|
||||
while (this.buffer[end++] !== 0) {}
|
||||
this.offset = end
|
||||
return this.buffer.toString(this.encoding, start, end - 1)
|
||||
}
|
||||
|
||||
public bytes(length: number): Buffer {
|
||||
const result = this.buffer.slice(this.offset, this.offset + length)
|
||||
this.offset += length
|
||||
return result
|
||||
}
|
||||
}
|
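A small sketch of the reader's stateful cursor (assumed import path; sample bytes invented for illustration):

import { BufferReader } from 'pg-protocol/dist/buffer-reader'

const reader = new BufferReader()
// 'ok' + NUL terminator, then a big-endian int16 of 42
reader.setBuffer(0, Buffer.from([0x6f, 0x6b, 0x00, 0x00, 0x2a]))
console.log(reader.cstring()) // 'ok'
console.log(reader.int16())   // 42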
85
lib/node/node_modules/pg-protocol/src/buffer-writer.ts
generated
vendored
Normal file
|
@@ -0,0 +1,85 @@
|
|||
// binary data writer tuned for encoding messages specific to the postgres binary protocol
|
||||
|
||||
export class Writer {
|
||||
private buffer: Buffer
|
||||
private offset: number = 5
|
||||
private headerPosition: number = 0
|
||||
constructor(private size = 256) {
|
||||
this.buffer = Buffer.allocUnsafe(size)
|
||||
}
|
||||
|
||||
private ensure(size: number): void {
|
||||
var remaining = this.buffer.length - this.offset
|
||||
if (remaining < size) {
|
||||
var oldBuffer = this.buffer
|
||||
// exponential growth factor of around ~ 1.5
|
||||
// https://stackoverflow.com/questions/2269063/buffer-growth-strategy
|
||||
var newSize = oldBuffer.length + (oldBuffer.length >> 1) + size
|
||||
this.buffer = Buffer.allocUnsafe(newSize)
|
||||
oldBuffer.copy(this.buffer)
|
||||
}
|
||||
}
|
||||
|
||||
public addInt32(num: number): Writer {
|
||||
this.ensure(4)
|
||||
this.buffer[this.offset++] = (num >>> 24) & 0xff
|
||||
this.buffer[this.offset++] = (num >>> 16) & 0xff
|
||||
this.buffer[this.offset++] = (num >>> 8) & 0xff
|
||||
this.buffer[this.offset++] = (num >>> 0) & 0xff
|
||||
return this
|
||||
}
|
||||
|
||||
public addInt16(num: number): Writer {
|
||||
this.ensure(2)
|
||||
this.buffer[this.offset++] = (num >>> 8) & 0xff
|
||||
this.buffer[this.offset++] = (num >>> 0) & 0xff
|
||||
return this
|
||||
}
|
||||
|
||||
public addCString(string: string): Writer {
|
||||
if (!string) {
|
||||
this.ensure(1)
|
||||
} else {
|
||||
var len = Buffer.byteLength(string)
|
||||
this.ensure(len + 1) // +1 for null terminator
|
||||
this.buffer.write(string, this.offset, 'utf-8')
|
||||
this.offset += len
|
||||
}
|
||||
|
||||
this.buffer[this.offset++] = 0 // null terminator
|
||||
return this
|
||||
}
|
||||
|
||||
public addString(string: string = ''): Writer {
|
||||
var len = Buffer.byteLength(string)
|
||||
this.ensure(len)
|
||||
this.buffer.write(string, this.offset)
|
||||
this.offset += len
|
||||
return this
|
||||
}
|
||||
|
||||
public add(otherBuffer: Buffer): Writer {
|
||||
this.ensure(otherBuffer.length)
|
||||
otherBuffer.copy(this.buffer, this.offset)
|
||||
this.offset += otherBuffer.length
|
||||
return this
|
||||
}
|
||||
|
||||
private join(code?: number): Buffer {
|
||||
if (code) {
|
||||
this.buffer[this.headerPosition] = code
|
||||
//length is everything in this packet minus the code
|
||||
const length = this.offset - (this.headerPosition + 1)
|
||||
this.buffer.writeInt32BE(length, this.headerPosition + 1)
|
||||
}
|
||||
return this.buffer.slice(code ? 0 : 5, this.offset)
|
||||
}
|
||||
|
||||
public flush(code?: number): Buffer {
|
||||
var result = this.join(code)
|
||||
this.offset = 5
|
||||
this.headerPosition = 0
|
||||
this.buffer = Buffer.allocUnsafe(this.size)
|
||||
return result
|
||||
}
|
||||
}
|
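The offset starting at 5 reserves room for the one-byte code and int32 length that join() back-fills. A sketch (assumed import path; sample query invented):

import { Writer } from 'pg-protocol/dist/buffer-writer'

const w = new Writer()
const packet = w.addCString('select 1').flush(0x51) // 0x51 = 'Q'
console.log(packet[0] === 0x51)    // true: code byte written into the reserved header
console.log(packet.readInt32BE(1)) // 13: length includes itself but not the code byte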
557
lib/node/node_modules/pg-protocol/src/inbound-parser.test.ts
generated
vendored
Normal file
|
@@ -0,0 +1,557 @@
|
|||
import buffers from './testing/test-buffers'
|
||||
import BufferList from './testing/buffer-list'
|
||||
import { parse } from '.'
|
||||
import assert from 'assert'
|
||||
import { PassThrough } from 'stream'
|
||||
import { BackendMessage } from './messages'
|
||||
|
||||
var authOkBuffer = buffers.authenticationOk()
|
||||
var paramStatusBuffer = buffers.parameterStatus('client_encoding', 'UTF8')
|
||||
var readyForQueryBuffer = buffers.readyForQuery()
|
||||
var backendKeyDataBuffer = buffers.backendKeyData(1, 2)
|
||||
var commandCompleteBuffer = buffers.commandComplete('SELECT 3')
|
||||
var parseCompleteBuffer = buffers.parseComplete()
|
||||
var bindCompleteBuffer = buffers.bindComplete()
|
||||
var portalSuspendedBuffer = buffers.portalSuspended()
|
||||
|
||||
var addRow = function (bufferList: BufferList, name: string, offset: number) {
|
||||
return bufferList
|
||||
.addCString(name) // field name
|
||||
.addInt32(offset++) // table id
|
||||
.addInt16(offset++) // attribute number of the column
|
||||
.addInt32(offset++) // objectId of field's data type
|
||||
.addInt16(offset++) // datatype size
|
||||
.addInt32(offset++) // type modifier
|
||||
.addInt16(0) // format code, 0 => text
|
||||
}
|
||||
|
||||
var row1 = {
|
||||
name: 'id',
|
||||
tableID: 1,
|
||||
attributeNumber: 2,
|
||||
dataTypeID: 3,
|
||||
dataTypeSize: 4,
|
||||
typeModifier: 5,
|
||||
formatCode: 0,
|
||||
}
|
||||
var oneRowDescBuff = buffers.rowDescription([row1])
|
||||
row1.name = 'bang'
|
||||
|
||||
var twoRowBuf = buffers.rowDescription([
|
||||
row1,
|
||||
{
|
||||
name: 'whoah',
|
||||
tableID: 10,
|
||||
attributeNumber: 11,
|
||||
dataTypeID: 12,
|
||||
dataTypeSize: 13,
|
||||
typeModifier: 14,
|
||||
formatCode: 0,
|
||||
},
|
||||
])
|
||||
|
||||
var emptyRowFieldBuf = new BufferList().addInt16(0).join(true, 'D')
|
||||
|
||||
var emptyRowFieldBuf = buffers.dataRow([])
|
||||
|
||||
var oneFieldBuf = new BufferList()
|
||||
.addInt16(1) // number of fields
|
||||
.addInt32(5) // length of bytes of fields
|
||||
.addCString('test')
|
||||
.join(true, 'D')
|
||||
|
||||
var oneFieldBuf = buffers.dataRow(['test'])
|
||||
|
||||
var expectedAuthenticationOkayMessage = {
|
||||
name: 'authenticationOk',
|
||||
length: 8,
|
||||
}
|
||||
|
||||
var expectedParameterStatusMessage = {
|
||||
name: 'parameterStatus',
|
||||
parameterName: 'client_encoding',
|
||||
parameterValue: 'UTF8',
|
||||
length: 25,
|
||||
}
|
||||
|
||||
var expectedBackendKeyDataMessage = {
|
||||
name: 'backendKeyData',
|
||||
processID: 1,
|
||||
secretKey: 2,
|
||||
}
|
||||
|
||||
var expectedReadyForQueryMessage = {
|
||||
name: 'readyForQuery',
|
||||
length: 5,
|
||||
status: 'I',
|
||||
}
|
||||
|
||||
var expectedCommandCompleteMessage = {
|
||||
name: 'commandComplete',
|
||||
length: 13,
|
||||
text: 'SELECT 3',
|
||||
}
|
||||
var emptyRowDescriptionBuffer = new BufferList()
|
||||
.addInt16(0) // number of fields
|
||||
.join(true, 'T')
|
||||
|
||||
var expectedEmptyRowDescriptionMessage = {
|
||||
name: 'rowDescription',
|
||||
length: 6,
|
||||
fieldCount: 0,
|
||||
fields: [],
|
||||
}
|
||||
var expectedOneRowMessage = {
|
||||
name: 'rowDescription',
|
||||
length: 27,
|
||||
fieldCount: 1,
|
||||
fields: [
|
||||
{
|
||||
name: 'id',
|
||||
tableID: 1,
|
||||
columnID: 2,
|
||||
dataTypeID: 3,
|
||||
dataTypeSize: 4,
|
||||
dataTypeModifier: 5,
|
||||
format: 'text',
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
var expectedTwoRowMessage = {
|
||||
name: 'rowDescription',
|
||||
length: 53,
|
||||
fieldCount: 2,
|
||||
fields: [
|
||||
{
|
||||
name: 'bang',
|
||||
tableID: 1,
|
||||
columnID: 2,
|
||||
dataTypeID: 3,
|
||||
dataTypeSize: 4,
|
||||
dataTypeModifier: 5,
|
||||
format: 'text',
|
||||
},
|
||||
{
|
||||
name: 'whoah',
|
||||
tableID: 10,
|
||||
columnID: 11,
|
||||
dataTypeID: 12,
|
||||
dataTypeSize: 13,
|
||||
dataTypeModifier: 14,
|
||||
format: 'text',
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
var emptyParameterDescriptionBuffer = new BufferList()
|
||||
.addInt16(0) // number of parameters
|
||||
.join(true, 't')
|
||||
|
||||
var oneParameterDescBuf = buffers.parameterDescription([1111])
|
||||
|
||||
var twoParameterDescBuf = buffers.parameterDescription([2222, 3333])
|
||||
|
||||
var expectedEmptyParameterDescriptionMessage = {
|
||||
name: 'parameterDescription',
|
||||
length: 6,
|
||||
parameterCount: 0,
|
||||
dataTypeIDs: [],
|
||||
}
|
||||
|
||||
var expectedOneParameterMessage = {
|
||||
name: 'parameterDescription',
|
||||
length: 10,
|
||||
parameterCount: 1,
|
||||
dataTypeIDs: [1111],
|
||||
}
|
||||
|
||||
var expectedTwoParameterMessage = {
|
||||
name: 'parameterDescription',
|
||||
length: 14,
|
||||
parameterCount: 2,
|
||||
dataTypeIDs: [2222, 3333],
|
||||
}
|
||||
|
||||
var testForMessage = function (buffer: Buffer, expectedMessage: any) {
|
||||
it('receives and parses ' + expectedMessage.name, async () => {
|
||||
const messages = await parseBuffers([buffer])
|
||||
const [lastMessage] = messages
|
||||
|
||||
for (const key in expectedMessage) {
|
||||
assert.deepEqual((lastMessage as any)[key], expectedMessage[key])
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
var plainPasswordBuffer = buffers.authenticationCleartextPassword()
|
||||
var md5PasswordBuffer = buffers.authenticationMD5Password()
|
||||
var SASLBuffer = buffers.authenticationSASL()
|
||||
var SASLContinueBuffer = buffers.authenticationSASLContinue()
|
||||
var SASLFinalBuffer = buffers.authenticationSASLFinal()
|
||||
|
||||
var expectedPlainPasswordMessage = {
|
||||
name: 'authenticationCleartextPassword',
|
||||
}
|
||||
|
||||
var expectedMD5PasswordMessage = {
|
||||
name: 'authenticationMD5Password',
|
||||
salt: Buffer.from([1, 2, 3, 4]),
|
||||
}
|
||||
|
||||
var expectedSASLMessage = {
|
||||
name: 'authenticationSASL',
|
||||
mechanisms: ['SCRAM-SHA-256'],
|
||||
}
|
||||
|
||||
var expectedSASLContinueMessage = {
|
||||
name: 'authenticationSASLContinue',
|
||||
data: 'data',
|
||||
}
|
||||
|
||||
var expectedSASLFinalMessage = {
|
||||
name: 'authenticationSASLFinal',
|
||||
data: 'data',
|
||||
}
|
||||
|
||||
var notificationResponseBuffer = buffers.notification(4, 'hi', 'boom')
|
||||
var expectedNotificationResponseMessage = {
|
||||
name: 'notification',
|
||||
processId: 4,
|
||||
channel: 'hi',
|
||||
payload: 'boom',
|
||||
}
|
||||
|
||||
const parseBuffers = async (buffers: Buffer[]): Promise<BackendMessage[]> => {
|
||||
const stream = new PassThrough()
|
||||
for (const buffer of buffers) {
|
||||
stream.write(buffer)
|
||||
}
|
||||
stream.end()
|
||||
const msgs: BackendMessage[] = []
|
||||
await parse(stream, (msg) => msgs.push(msg))
|
||||
return msgs
|
||||
}
|
||||
|
||||
describe('PgPacketStream', function () {
|
||||
testForMessage(authOkBuffer, expectedAuthenticationOkayMessage)
|
||||
testForMessage(plainPasswordBuffer, expectedPlainPasswordMessage)
|
||||
testForMessage(md5PasswordBuffer, expectedMD5PasswordMessage)
|
||||
testForMessage(SASLBuffer, expectedSASLMessage)
|
||||
testForMessage(SASLContinueBuffer, expectedSASLContinueMessage)
|
||||
|
||||
// this exercises a found bug in the parser:
|
||||
// https://github.com/brianc/node-postgres/pull/2210#issuecomment-627626084
|
||||
// and adds a test which is deterministic, rather than relying on network packet chunking
|
||||
const extendedSASLContinueBuffer = Buffer.concat([SASLContinueBuffer, Buffer.from([1, 2, 3, 4])])
|
||||
testForMessage(extendedSASLContinueBuffer, expectedSASLContinueMessage)
|
||||
|
||||
testForMessage(SASLFinalBuffer, expectedSASLFinalMessage)
|
||||
|
||||
// this exercises a found bug in the parser:
|
||||
// https://github.com/brianc/node-postgres/pull/2210#issuecomment-627626084
|
||||
// and adds a test which is deterministic, rather than relying on network packet chunking
|
||||
const extendedSASLFinalBuffer = Buffer.concat([SASLFinalBuffer, Buffer.from([1, 2, 4, 5])])
|
||||
testForMessage(extendedSASLFinalBuffer, expectedSASLFinalMessage)
|
||||
|
||||
testForMessage(paramStatusBuffer, expectedParameterStatusMessage)
|
||||
testForMessage(backendKeyDataBuffer, expectedBackendKeyDataMessage)
|
||||
testForMessage(readyForQueryBuffer, expectedReadyForQueryMessage)
|
||||
testForMessage(commandCompleteBuffer, expectedCommandCompleteMessage)
|
||||
testForMessage(notificationResponseBuffer, expectedNotificationResponseMessage)
|
||||
testForMessage(buffers.emptyQuery(), {
|
||||
name: 'emptyQuery',
|
||||
length: 4,
|
||||
})
|
||||
|
||||
testForMessage(Buffer.from([0x6e, 0, 0, 0, 4]), {
|
||||
name: 'noData',
|
||||
})
|
||||
|
||||
describe('rowDescription messages', function () {
|
||||
testForMessage(emptyRowDescriptionBuffer, expectedEmptyRowDescriptionMessage)
|
||||
testForMessage(oneRowDescBuff, expectedOneRowMessage)
|
||||
testForMessage(twoRowBuf, expectedTwoRowMessage)
|
||||
})
|
||||
|
||||
describe('parameterDescription messages', function () {
|
||||
testForMessage(emptyParameterDescriptionBuffer, expectedEmptyParameterDescriptionMessage)
|
||||
testForMessage(oneParameterDescBuf, expectedOneParameterMessage)
|
||||
testForMessage(twoParameterDescBuf, expectedTwoParameterMessage)
|
||||
})
|
||||
|
||||
describe('parsing rows', function () {
|
||||
describe('parsing empty row', function () {
|
||||
testForMessage(emptyRowFieldBuf, {
|
||||
name: 'dataRow',
|
||||
fieldCount: 0,
|
||||
})
|
||||
})
|
||||
|
||||
describe('parsing data row with fields', function () {
|
||||
testForMessage(oneFieldBuf, {
|
||||
name: 'dataRow',
|
||||
fieldCount: 1,
|
||||
fields: ['test'],
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('notice message', function () {
|
||||
// this uses the same logic as error message
|
||||
var buff = buffers.notice([{ type: 'C', value: 'code' }])
|
||||
testForMessage(buff, {
|
||||
name: 'notice',
|
||||
code: 'code',
|
||||
})
|
||||
})
|
||||
|
||||
testForMessage(buffers.error([]), {
|
||||
name: 'error',
|
||||
})
|
||||
|
||||
describe('with all the fields', function () {
|
||||
var buffer = buffers.error([
|
||||
{
|
||||
type: 'S',
|
||||
value: 'ERROR',
|
||||
},
|
||||
{
|
||||
type: 'C',
|
||||
value: 'code',
|
||||
},
|
||||
{
|
||||
type: 'M',
|
||||
value: 'message',
|
||||
},
|
||||
{
|
||||
type: 'D',
|
||||
value: 'details',
|
||||
},
|
||||
{
|
||||
type: 'H',
|
||||
value: 'hint',
|
||||
},
|
||||
{
|
||||
type: 'P',
|
||||
value: '100',
|
||||
},
|
||||
{
|
||||
type: 'p',
|
||||
value: '101',
|
||||
},
|
||||
{
|
||||
type: 'q',
|
||||
value: 'query',
|
||||
},
|
||||
{
|
||||
type: 'W',
|
||||
value: 'where',
|
||||
},
|
||||
{
|
||||
type: 'F',
|
||||
value: 'file',
|
||||
},
|
||||
{
|
||||
type: 'L',
|
||||
value: 'line',
|
||||
},
|
||||
{
|
||||
type: 'R',
|
||||
value: 'routine',
|
||||
},
|
||||
{
|
||||
type: 'Z', // ignored
|
||||
value: 'alsdkf',
|
||||
},
|
||||
])
|
||||
|
||||
testForMessage(buffer, {
|
||||
name: 'error',
|
||||
severity: 'ERROR',
|
||||
code: 'code',
|
||||
message: 'message',
|
||||
detail: 'details',
|
||||
hint: 'hint',
|
||||
position: '100',
|
||||
internalPosition: '101',
|
||||
internalQuery: 'query',
|
||||
where: 'where',
|
||||
file: 'file',
|
||||
line: 'line',
|
||||
routine: 'routine',
|
||||
})
|
||||
})
|
||||
|
||||
testForMessage(parseCompleteBuffer, {
|
||||
name: 'parseComplete',
|
||||
})
|
||||
|
||||
testForMessage(bindCompleteBuffer, {
|
||||
name: 'bindComplete',
|
||||
})
|
||||
|
||||
testForMessage(bindCompleteBuffer, {
|
||||
name: 'bindComplete',
|
||||
})
|
||||
|
||||
testForMessage(buffers.closeComplete(), {
|
||||
name: 'closeComplete',
|
||||
})
|
||||
|
||||
describe('parses portal suspended message', function () {
|
||||
testForMessage(portalSuspendedBuffer, {
|
||||
name: 'portalSuspended',
|
||||
})
|
||||
})
|
||||
|
||||
describe('parses replication start message', function () {
|
||||
testForMessage(Buffer.from([0x57, 0x00, 0x00, 0x00, 0x04]), {
|
||||
name: 'replicationStart',
|
||||
length: 4,
|
||||
})
|
||||
})
|
||||
|
||||
describe('copy', () => {
|
||||
testForMessage(buffers.copyIn(0), {
|
||||
name: 'copyInResponse',
|
||||
length: 7,
|
||||
binary: false,
|
||||
columnTypes: [],
|
||||
})
|
||||
|
||||
testForMessage(buffers.copyIn(2), {
|
||||
name: 'copyInResponse',
|
||||
length: 11,
|
||||
binary: false,
|
||||
columnTypes: [0, 1],
|
||||
})
|
||||
|
||||
testForMessage(buffers.copyOut(0), {
|
||||
name: 'copyOutResponse',
|
||||
length: 7,
|
||||
binary: false,
|
||||
columnTypes: [],
|
||||
})
|
||||
|
||||
testForMessage(buffers.copyOut(3), {
|
||||
name: 'copyOutResponse',
|
||||
length: 13,
|
||||
binary: false,
|
||||
columnTypes: [0, 1, 2],
|
||||
})
|
||||
|
||||
testForMessage(buffers.copyDone(), {
|
||||
name: 'copyDone',
|
||||
length: 4,
|
||||
})
|
||||
|
||||
testForMessage(buffers.copyData(Buffer.from([5, 6, 7])), {
|
||||
name: 'copyData',
|
||||
length: 7,
|
||||
chunk: Buffer.from([5, 6, 7]),
|
||||
})
|
||||
})
|
||||
|
||||
// since tcp can deliver the incoming data split at arbitrary byte
|
||||
// boundaries, we need to make sure we can parse a message at every
|
||||
// possible split point
|
||||
describe('split buffer, single message parsing', function () {
|
||||
var fullBuffer = buffers.dataRow([null, 'bang', 'zug zug', null, '!'])
|
||||
|
||||
it('parses when full buffer comes in', async function () {
|
||||
const messages = await parseBuffers([fullBuffer])
|
||||
const message = messages[0] as any
|
||||
assert.equal(message.fields.length, 5)
|
||||
assert.equal(message.fields[0], null)
|
||||
assert.equal(message.fields[1], 'bang')
|
||||
assert.equal(message.fields[2], 'zug zug')
|
||||
assert.equal(message.fields[3], null)
|
||||
assert.equal(message.fields[4], '!')
|
||||
})
|
||||
|
||||
var testMessageReceivedAfterSplitAt = async function (split: number) {
|
||||
var firstBuffer = Buffer.alloc(fullBuffer.length - split)
|
||||
var secondBuffer = Buffer.alloc(fullBuffer.length - firstBuffer.length)
|
||||
fullBuffer.copy(firstBuffer, 0, 0)
|
||||
fullBuffer.copy(secondBuffer, 0, firstBuffer.length)
|
||||
const messages = await parseBuffers([firstBuffer, secondBuffer])
|
||||
const message = messages[0] as any
|
||||
assert.equal(message.fields.length, 5)
|
||||
assert.equal(message.fields[0], null)
|
||||
assert.equal(message.fields[1], 'bang')
|
||||
assert.equal(message.fields[2], 'zug zug')
|
||||
assert.equal(message.fields[3], null)
|
||||
assert.equal(message.fields[4], '!')
|
||||
}
|
||||
|
||||
it('parses when split in the middle', function () {
|
||||
return testMessageReceivedAfterSplitAt(6)
|
||||
})
|
||||
|
||||
it('parses when split at end', function () {
|
||||
testMessageRecievedAfterSpiltAt(2)
|
||||
})
|
||||
|
||||
it('parses when split at beginning', function () {
|
||||
testMessageRecievedAfterSpiltAt(fullBuffer.length - 2)
|
||||
testMessageRecievedAfterSpiltAt(fullBuffer.length - 1)
|
||||
testMessageRecievedAfterSpiltAt(fullBuffer.length - 5)
|
||||
})
|
||||
})
|
||||
|
||||
  describe('split buffer, multiple message parsing', function () {
    var dataRowBuffer = buffers.dataRow(['!'])
    var readyForQueryBuffer = buffers.readyForQuery()
    var fullBuffer = Buffer.alloc(dataRowBuffer.length + readyForQueryBuffer.length)
    dataRowBuffer.copy(fullBuffer, 0, 0)
    readyForQueryBuffer.copy(fullBuffer, dataRowBuffer.length, 0)

    var verifyMessages = function (messages: any[]) {
      assert.strictEqual(messages.length, 2)
      assert.deepEqual(messages[0], {
        name: 'dataRow',
        fieldCount: 1,
        length: 11,
        fields: ['!'],
      })
      assert.equal(messages[0].fields[0], '!')
      assert.deepEqual(messages[1], {
        name: 'readyForQuery',
        length: 5,
        status: 'I',
      })
    }
    // sanity check
    it('receives both messages when packet is not split', async function () {
      const messages = await parseBuffers([fullBuffer])
      verifyMessages(messages)
    })

    var splitAndVerifyTwoMessages = async function (split: number) {
      var firstBuffer = Buffer.alloc(fullBuffer.length - split)
      var secondBuffer = Buffer.alloc(fullBuffer.length - firstBuffer.length)
      fullBuffer.copy(firstBuffer, 0, 0)
      fullBuffer.copy(secondBuffer, 0, firstBuffer.length)
      const messages = await parseBuffers([firstBuffer, secondBuffer])
      verifyMessages(messages)
    }

    describe('receives both messages when packet is split', function () {
      it('in the middle', function () {
        return splitAndVerifyTwoMessages(11)
      })
      it('at the front', function () {
        return Promise.all([
          splitAndVerifyTwoMessages(fullBuffer.length - 1),
          splitAndVerifyTwoMessages(fullBuffer.length - 4),
          splitAndVerifyTwoMessages(fullBuffer.length - 6),
        ])
      })

      it('at the end', function () {
        return Promise.all([splitAndVerifyTwoMessages(8), splitAndVerifyTwoMessages(1)])
      })
    })
  })
})

11 lib/node/node_modules/pg-protocol/src/index.ts generated vendored Normal file
@@ -0,0 +1,11 @@
import { BackendMessage, DatabaseError } from './messages'
import { serialize } from './serializer'
import { Parser, MessageCallback } from './parser'

export function parse(stream: NodeJS.ReadableStream, callback: MessageCallback): Promise<void> {
  const parser = new Parser()
  stream.on('data', (buffer: Buffer) => parser.parse(buffer, callback))
  return new Promise((resolve) => stream.on('end', () => resolve()))
}

export { serialize, DatabaseError }
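The exported `parse` helper above is the package's whole stream-facing entry point: it feeds every `data` chunk into one `Parser` and resolves when the stream ends. A minimal sketch of driving it (not part of the diff; the `PassThrough` stream and the hand-built ReadyForQuery frame are illustrative only):

```ts
import { PassThrough } from 'stream'
import { parse } from 'pg-protocol'

const stream = new PassThrough()
const done = parse(stream, (msg) => {
  console.log(msg.name) // logs 'readyForQuery'
})
// 'Z' frame: one code byte, int32 length 5 (self-inclusive), status byte 'I'
stream.end(Buffer.from([0x5a, 0x00, 0x00, 0x00, 0x05, 0x49]))
done.then(() => console.log('stream ended'))
```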

262 lib/node/node_modules/pg-protocol/src/messages.ts generated vendored Normal file
@@ -0,0 +1,262 @@
export type Mode = 'text' | 'binary'

export type MessageName =
  | 'parseComplete'
  | 'bindComplete'
  | 'closeComplete'
  | 'noData'
  | 'portalSuspended'
  | 'replicationStart'
  | 'emptyQuery'
  | 'copyDone'
  | 'copyData'
  | 'rowDescription'
  | 'parameterDescription'
  | 'parameterStatus'
  | 'backendKeyData'
  | 'notification'
  | 'readyForQuery'
  | 'commandComplete'
  | 'dataRow'
  | 'copyInResponse'
  | 'copyOutResponse'
  | 'authenticationOk'
  | 'authenticationMD5Password'
  | 'authenticationCleartextPassword'
  | 'authenticationSASL'
  | 'authenticationSASLContinue'
  | 'authenticationSASLFinal'
  | 'error'
  | 'notice'

export interface BackendMessage {
  name: MessageName
  length: number
}

export const parseComplete: BackendMessage = {
  name: 'parseComplete',
  length: 5,
}

export const bindComplete: BackendMessage = {
  name: 'bindComplete',
  length: 5,
}

export const closeComplete: BackendMessage = {
  name: 'closeComplete',
  length: 5,
}

export const noData: BackendMessage = {
  name: 'noData',
  length: 5,
}

export const portalSuspended: BackendMessage = {
  name: 'portalSuspended',
  length: 5,
}

export const replicationStart: BackendMessage = {
  name: 'replicationStart',
  length: 4,
}

export const emptyQuery: BackendMessage = {
  name: 'emptyQuery',
  length: 4,
}

export const copyDone: BackendMessage = {
  name: 'copyDone',
  length: 4,
}

interface NoticeOrError {
  message: string | undefined
  severity: string | undefined
  code: string | undefined
  detail: string | undefined
  hint: string | undefined
  position: string | undefined
  internalPosition: string | undefined
  internalQuery: string | undefined
  where: string | undefined
  schema: string | undefined
  table: string | undefined
  column: string | undefined
  dataType: string | undefined
  constraint: string | undefined
  file: string | undefined
  line: string | undefined
  routine: string | undefined
}

export class DatabaseError extends Error implements NoticeOrError {
  public severity: string | undefined
  public code: string | undefined
  public detail: string | undefined
  public hint: string | undefined
  public position: string | undefined
  public internalPosition: string | undefined
  public internalQuery: string | undefined
  public where: string | undefined
  public schema: string | undefined
  public table: string | undefined
  public column: string | undefined
  public dataType: string | undefined
  public constraint: string | undefined
  public file: string | undefined
  public line: string | undefined
  public routine: string | undefined
  constructor(
    message: string,
    public readonly length: number,
    public readonly name: MessageName
  ) {
    super(message)
  }
}

export class CopyDataMessage {
  public readonly name = 'copyData'
  constructor(
    public readonly length: number,
    public readonly chunk: Buffer
  ) {}
}

export class CopyResponse {
  public readonly columnTypes: number[]
  constructor(
    public readonly length: number,
    public readonly name: MessageName,
    public readonly binary: boolean,
    columnCount: number
  ) {
    this.columnTypes = new Array(columnCount)
  }
}

export class Field {
  constructor(
    public readonly name: string,
    public readonly tableID: number,
    public readonly columnID: number,
    public readonly dataTypeID: number,
    public readonly dataTypeSize: number,
    public readonly dataTypeModifier: number,
    public readonly format: Mode
  ) {}
}

export class RowDescriptionMessage {
  public readonly name: MessageName = 'rowDescription'
  public readonly fields: Field[]
  constructor(
    public readonly length: number,
    public readonly fieldCount: number
  ) {
    this.fields = new Array(this.fieldCount)
  }
}

export class ParameterDescriptionMessage {
  public readonly name: MessageName = 'parameterDescription'
  public readonly dataTypeIDs: number[]
  constructor(
    public readonly length: number,
    public readonly parameterCount: number
  ) {
    this.dataTypeIDs = new Array(this.parameterCount)
  }
}

export class ParameterStatusMessage {
  public readonly name: MessageName = 'parameterStatus'
  constructor(
    public readonly length: number,
    public readonly parameterName: string,
    public readonly parameterValue: string
  ) {}
}

export class AuthenticationMD5Password implements BackendMessage {
  public readonly name: MessageName = 'authenticationMD5Password'
  constructor(
    public readonly length: number,
    public readonly salt: Buffer
  ) {}
}

export class BackendKeyDataMessage {
  public readonly name: MessageName = 'backendKeyData'
  constructor(
    public readonly length: number,
    public readonly processID: number,
    public readonly secretKey: number
  ) {}
}

export class NotificationResponseMessage {
  public readonly name: MessageName = 'notification'
  constructor(
    public readonly length: number,
    public readonly processId: number,
    public readonly channel: string,
    public readonly payload: string
  ) {}
}

export class ReadyForQueryMessage {
  public readonly name: MessageName = 'readyForQuery'
  constructor(
    public readonly length: number,
    public readonly status: string
  ) {}
}

export class CommandCompleteMessage {
  public readonly name: MessageName = 'commandComplete'
  constructor(
    public readonly length: number,
    public readonly text: string
  ) {}
}

export class DataRowMessage {
  public readonly fieldCount: number
  public readonly name: MessageName = 'dataRow'
  constructor(
    public length: number,
    public fields: any[]
  ) {
    this.fieldCount = fields.length
  }
}

export class NoticeMessage implements BackendMessage, NoticeOrError {
  constructor(
    public readonly length: number,
    public readonly message: string | undefined
  ) {}
  public readonly name = 'notice'
  public severity: string | undefined
  public code: string | undefined
  public detail: string | undefined
  public hint: string | undefined
  public position: string | undefined
  public internalPosition: string | undefined
  public internalQuery: string | undefined
  public where: string | undefined
  public schema: string | undefined
  public table: string | undefined
  public column: string | undefined
  public dataType: string | undefined
  public constraint: string | undefined
  public file: string | undefined
  public line: string | undefined
  public routine: string | undefined
}
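Of these message classes, `DatabaseError` is the one application code most often touches: the parser copies the server's error fields (code, detail, hint, and so on) onto it. A hypothetical sketch of consuming one (the SQLSTATE value and handling are illustrative, not from the diff):

```ts
import { DatabaseError } from 'pg-protocol'

function describeFailure(err: unknown): string {
  if (err instanceof DatabaseError && err.code === '42P01') {
    // 42P01 is PostgreSQL's SQLSTATE for undefined_table
    return `missing table: ${err.message} (hint: ${err.hint ?? 'none'})`
  }
  return String(err)
}
```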

272 lib/node/node_modules/pg-protocol/src/outbound-serializer.test.ts generated vendored Normal file
@@ -0,0 +1,272 @@
import assert from 'assert'
import { serialize } from './serializer'
import BufferList from './testing/buffer-list'

describe('serializer', () => {
  it('builds startup message', function () {
    const actual = serialize.startup({
      user: 'brian',
      database: 'bang',
    })
    assert.deepEqual(
      actual,
      new BufferList()
        .addInt16(3)
        .addInt16(0)
        .addCString('user')
        .addCString('brian')
        .addCString('database')
        .addCString('bang')
        .addCString('client_encoding')
        .addCString('UTF8')
        .addCString('')
        .join(true)
    )
  })

  it('builds password message', function () {
    const actual = serialize.password('!')
    assert.deepEqual(actual, new BufferList().addCString('!').join(true, 'p'))
  })

  it('builds request ssl message', function () {
    const actual = serialize.requestSsl()
    const expected = new BufferList().addInt32(80877103).join(true)
    assert.deepEqual(actual, expected)
  })

  it('builds SASLInitialResponseMessage message', function () {
    const actual = serialize.sendSASLInitialResponseMessage('mech', 'data')
    assert.deepEqual(actual, new BufferList().addCString('mech').addInt32(4).addString('data').join(true, 'p'))
  })

  it('builds SCRAMClientFinalMessage message', function () {
    const actual = serialize.sendSCRAMClientFinalMessage('data')
    assert.deepEqual(actual, new BufferList().addString('data').join(true, 'p'))
  })

  it('builds query message', function () {
    var txt = 'select * from boom'
    const actual = serialize.query(txt)
    assert.deepEqual(actual, new BufferList().addCString(txt).join(true, 'Q'))
  })

  describe('parse message', () => {
    it('builds parse message', function () {
      const actual = serialize.parse({ text: '!' })
      var expected = new BufferList().addCString('').addCString('!').addInt16(0).join(true, 'P')
      assert.deepEqual(actual, expected)
    })

    it('builds parse message with named query', function () {
      const actual = serialize.parse({
        name: 'boom',
        text: 'select * from boom',
        types: [],
      })
      var expected = new BufferList().addCString('boom').addCString('select * from boom').addInt16(0).join(true, 'P')
      assert.deepEqual(actual, expected)
    })

    it('with multiple parameters', function () {
      const actual = serialize.parse({
        name: 'force',
        text: 'select * from bang where name = $1',
        types: [1, 2, 3, 4],
      })
      var expected = new BufferList()
        .addCString('force')
        .addCString('select * from bang where name = $1')
        .addInt16(4)
        .addInt32(1)
        .addInt32(2)
        .addInt32(3)
        .addInt32(4)
        .join(true, 'P')
      assert.deepEqual(actual, expected)
    })
  })

  describe('bind messages', function () {
    it('with no values', function () {
      const actual = serialize.bind()

      var expectedBuffer = new BufferList()
        .addCString('')
        .addCString('')
        .addInt16(0)
        .addInt16(0)
        .addInt16(0)
        .join(true, 'B')
      assert.deepEqual(actual, expectedBuffer)
    })

    it('with named statement, portal, and values', function () {
      const actual = serialize.bind({
        portal: 'bang',
        statement: 'woo',
        values: ['1', 'hi', null, 'zing'],
      })
      var expectedBuffer = new BufferList()
        .addCString('bang') // portal name
        .addCString('woo') // statement name
        .addInt16(4)
        .addInt16(0)
        .addInt16(0)
        .addInt16(0)
        .addInt16(0)
        .addInt16(4)
        .addInt32(1)
        .add(Buffer.from('1'))
        .addInt32(2)
        .add(Buffer.from('hi'))
        .addInt32(-1)
        .addInt32(4)
        .add(Buffer.from('zing'))
        .addInt16(0)
        .join(true, 'B')
      assert.deepEqual(actual, expectedBuffer)
    })
  })

  it('with custom valueMapper', function () {
    const actual = serialize.bind({
      portal: 'bang',
      statement: 'woo',
      values: ['1', 'hi', null, 'zing'],
      valueMapper: () => null,
    })
    var expectedBuffer = new BufferList()
      .addCString('bang') // portal name
      .addCString('woo') // statement name
      .addInt16(4)
      .addInt16(0)
      .addInt16(0)
      .addInt16(0)
      .addInt16(0)
      .addInt16(4)
      .addInt32(-1)
      .addInt32(-1)
      .addInt32(-1)
      .addInt32(-1)
      .addInt16(0)
      .join(true, 'B')
    assert.deepEqual(actual, expectedBuffer)
  })

  it('with named statement, portal, and buffer value', function () {
    const actual = serialize.bind({
      portal: 'bang',
      statement: 'woo',
      values: ['1', 'hi', null, Buffer.from('zing', 'utf8')],
    })
    var expectedBuffer = new BufferList()
      .addCString('bang') // portal name
      .addCString('woo') // statement name
      .addInt16(4) // value count
      .addInt16(0) // string
      .addInt16(0) // string
      .addInt16(0) // string
      .addInt16(1) // binary
      .addInt16(4)
      .addInt32(1)
      .add(Buffer.from('1'))
      .addInt32(2)
      .add(Buffer.from('hi'))
      .addInt32(-1)
      .addInt32(4)
      .add(Buffer.from('zing', 'utf-8'))
      .addInt16(0)
      .join(true, 'B')
    assert.deepEqual(actual, expectedBuffer)
  })

  describe('builds execute message', function () {
    it('for unnamed portal with no row limit', function () {
      const actual = serialize.execute()
      var expectedBuffer = new BufferList().addCString('').addInt32(0).join(true, 'E')
      assert.deepEqual(actual, expectedBuffer)
    })

    it('for named portal with row limit', function () {
      const actual = serialize.execute({
        portal: 'my favorite portal',
        rows: 100,
      })
      var expectedBuffer = new BufferList().addCString('my favorite portal').addInt32(100).join(true, 'E')
      assert.deepEqual(actual, expectedBuffer)
    })
  })

  it('builds flush command', function () {
    const actual = serialize.flush()
    var expected = new BufferList().join(true, 'H')
    assert.deepEqual(actual, expected)
  })

  it('builds sync command', function () {
    const actual = serialize.sync()
    var expected = new BufferList().join(true, 'S')
    assert.deepEqual(actual, expected)
  })

  it('builds end command', function () {
    const actual = serialize.end()
    var expected = Buffer.from([0x58, 0, 0, 0, 4])
    assert.deepEqual(actual, expected)
  })

  describe('builds describe command', function () {
    it('describe statement', function () {
      const actual = serialize.describe({ type: 'S', name: 'bang' })
      var expected = new BufferList().addChar('S').addCString('bang').join(true, 'D')
      assert.deepEqual(actual, expected)
    })

    it('describe unnamed portal', function () {
      const actual = serialize.describe({ type: 'P' })
      var expected = new BufferList().addChar('P').addCString('').join(true, 'D')
      assert.deepEqual(actual, expected)
    })
  })

  describe('builds close command', function () {
    it('describe statement', function () {
      const actual = serialize.close({ type: 'S', name: 'bang' })
      var expected = new BufferList().addChar('S').addCString('bang').join(true, 'C')
      assert.deepEqual(actual, expected)
    })

    it('describe unnamed portal', function () {
      const actual = serialize.close({ type: 'P' })
      var expected = new BufferList().addChar('P').addCString('').join(true, 'C')
      assert.deepEqual(actual, expected)
    })
  })

  describe('copy messages', function () {
    it('builds copyFromChunk', () => {
      const actual = serialize.copyData(Buffer.from([1, 2, 3]))
      const expected = new BufferList().add(Buffer.from([1, 2, 3])).join(true, 'd')
      assert.deepEqual(actual, expected)
    })

    it('builds copy fail', () => {
      const actual = serialize.copyFail('err!')
      const expected = new BufferList().addCString('err!').join(true, 'f')
      assert.deepEqual(actual, expected)
    })

    it('builds copy done', () => {
      const actual = serialize.copyDone()
      const expected = new BufferList().join(true, 'c')
      assert.deepEqual(actual, expected)
    })
  })

  it('builds cancel message', () => {
    const actual = serialize.cancel(3, 4)
    const expected = new BufferList().addInt16(1234).addInt16(5678).addInt32(3).addInt32(4).join(true)
    assert.deepEqual(actual, expected)
  })
})

389 lib/node/node_modules/pg-protocol/src/parser.ts generated vendored Normal file
@@ -0,0 +1,389 @@
import { TransformOptions } from 'stream'
import {
  Mode,
  bindComplete,
  parseComplete,
  closeComplete,
  noData,
  portalSuspended,
  copyDone,
  replicationStart,
  emptyQuery,
  ReadyForQueryMessage,
  CommandCompleteMessage,
  CopyDataMessage,
  CopyResponse,
  NotificationResponseMessage,
  RowDescriptionMessage,
  ParameterDescriptionMessage,
  Field,
  DataRowMessage,
  ParameterStatusMessage,
  BackendKeyDataMessage,
  DatabaseError,
  BackendMessage,
  MessageName,
  AuthenticationMD5Password,
  NoticeMessage,
} from './messages'
import { BufferReader } from './buffer-reader'
import assert from 'assert'

// every message is prefixed with a single byte
const CODE_LENGTH = 1
// every message has an int32 length which includes itself but does
// NOT include the code in the length
const LEN_LENGTH = 4

const HEADER_LENGTH = CODE_LENGTH + LEN_LENGTH

export type Packet = {
  code: number
  packet: Buffer
}

const emptyBuffer = Buffer.allocUnsafe(0)

type StreamOptions = TransformOptions & {
  mode: Mode
}

const enum MessageCodes {
  DataRow = 0x44, // D
  ParseComplete = 0x31, // 1
  BindComplete = 0x32, // 2
  CloseComplete = 0x33, // 3
  CommandComplete = 0x43, // C
  ReadyForQuery = 0x5a, // Z
  NoData = 0x6e, // n
  NotificationResponse = 0x41, // A
  AuthenticationResponse = 0x52, // R
  ParameterStatus = 0x53, // S
  BackendKeyData = 0x4b, // K
  ErrorMessage = 0x45, // E
  NoticeMessage = 0x4e, // N
  RowDescriptionMessage = 0x54, // T
  ParameterDescriptionMessage = 0x74, // t
  PortalSuspended = 0x73, // s
  ReplicationStart = 0x57, // W
  EmptyQuery = 0x49, // I
  CopyIn = 0x47, // G
  CopyOut = 0x48, // H
  CopyDone = 0x63, // c
  CopyData = 0x64, // d
}

export type MessageCallback = (msg: BackendMessage) => void

export class Parser {
  private buffer: Buffer = emptyBuffer
  private bufferLength: number = 0
  private bufferOffset: number = 0
  private reader = new BufferReader()
  private mode: Mode

  constructor(opts?: StreamOptions) {
    if (opts?.mode === 'binary') {
      throw new Error('Binary mode not supported yet')
    }
    this.mode = opts?.mode || 'text'
  }

  public parse(buffer: Buffer, callback: MessageCallback) {
    this.mergeBuffer(buffer)
    const bufferFullLength = this.bufferOffset + this.bufferLength
    let offset = this.bufferOffset
    while (offset + HEADER_LENGTH <= bufferFullLength) {
      // code is 1 byte long - it identifies the message type
      const code = this.buffer[offset]
      // length is 1 Uint32BE - it is the length of the message EXCLUDING the code
      const length = this.buffer.readUInt32BE(offset + CODE_LENGTH)
      const fullMessageLength = CODE_LENGTH + length
      if (fullMessageLength + offset <= bufferFullLength) {
        const message = this.handlePacket(offset + HEADER_LENGTH, code, length, this.buffer)
        callback(message)
        offset += fullMessageLength
      } else {
        break
      }
    }
    if (offset === bufferFullLength) {
      // No more use for the buffer
      this.buffer = emptyBuffer
      this.bufferLength = 0
      this.bufferOffset = 0
    } else {
      // Adjust the cursors of remainingBuffer
      this.bufferLength = bufferFullLength - offset
      this.bufferOffset = offset
    }
  }

  private mergeBuffer(buffer: Buffer): void {
    if (this.bufferLength > 0) {
      const newLength = this.bufferLength + buffer.byteLength
      const newFullLength = newLength + this.bufferOffset
      if (newFullLength > this.buffer.byteLength) {
        // We can't concat the new buffer with the remaining one
        let newBuffer: Buffer
        if (newLength <= this.buffer.byteLength && this.bufferOffset >= this.bufferLength) {
          // We can move the relevant part to the beginning of the buffer instead of allocating a new buffer
          newBuffer = this.buffer
        } else {
          // Allocate a new larger buffer
          let newBufferLength = this.buffer.byteLength * 2
          while (newLength >= newBufferLength) {
            newBufferLength *= 2
          }
          newBuffer = Buffer.allocUnsafe(newBufferLength)
        }
        // Move the remaining buffer to the new one
        this.buffer.copy(newBuffer, 0, this.bufferOffset, this.bufferOffset + this.bufferLength)
        this.buffer = newBuffer
        this.bufferOffset = 0
      }
      // Concat the new buffer with the remaining one
      buffer.copy(this.buffer, this.bufferOffset + this.bufferLength)
      this.bufferLength = newLength
    } else {
      this.buffer = buffer
      this.bufferOffset = 0
      this.bufferLength = buffer.byteLength
    }
  }

  private handlePacket(offset: number, code: number, length: number, bytes: Buffer): BackendMessage {
    switch (code) {
      case MessageCodes.BindComplete:
        return bindComplete
      case MessageCodes.ParseComplete:
        return parseComplete
      case MessageCodes.CloseComplete:
        return closeComplete
      case MessageCodes.NoData:
        return noData
      case MessageCodes.PortalSuspended:
        return portalSuspended
      case MessageCodes.CopyDone:
        return copyDone
      case MessageCodes.ReplicationStart:
        return replicationStart
      case MessageCodes.EmptyQuery:
        return emptyQuery
      case MessageCodes.DataRow:
        return this.parseDataRowMessage(offset, length, bytes)
      case MessageCodes.CommandComplete:
        return this.parseCommandCompleteMessage(offset, length, bytes)
      case MessageCodes.ReadyForQuery:
        return this.parseReadyForQueryMessage(offset, length, bytes)
      case MessageCodes.NotificationResponse:
        return this.parseNotificationMessage(offset, length, bytes)
      case MessageCodes.AuthenticationResponse:
        return this.parseAuthenticationResponse(offset, length, bytes)
      case MessageCodes.ParameterStatus:
        return this.parseParameterStatusMessage(offset, length, bytes)
      case MessageCodes.BackendKeyData:
        return this.parseBackendKeyData(offset, length, bytes)
      case MessageCodes.ErrorMessage:
        return this.parseErrorMessage(offset, length, bytes, 'error')
      case MessageCodes.NoticeMessage:
        return this.parseErrorMessage(offset, length, bytes, 'notice')
      case MessageCodes.RowDescriptionMessage:
        return this.parseRowDescriptionMessage(offset, length, bytes)
      case MessageCodes.ParameterDescriptionMessage:
        return this.parseParameterDescriptionMessage(offset, length, bytes)
      case MessageCodes.CopyIn:
        return this.parseCopyInMessage(offset, length, bytes)
      case MessageCodes.CopyOut:
        return this.parseCopyOutMessage(offset, length, bytes)
      case MessageCodes.CopyData:
        return this.parseCopyData(offset, length, bytes)
      default:
        return new DatabaseError('received invalid response: ' + code.toString(16), length, 'error')
    }
  }

  private parseReadyForQueryMessage(offset: number, length: number, bytes: Buffer) {
    this.reader.setBuffer(offset, bytes)
    const status = this.reader.string(1)
    return new ReadyForQueryMessage(length, status)
  }

  private parseCommandCompleteMessage(offset: number, length: number, bytes: Buffer) {
    this.reader.setBuffer(offset, bytes)
    const text = this.reader.cstring()
    return new CommandCompleteMessage(length, text)
  }

  private parseCopyData(offset: number, length: number, bytes: Buffer) {
    const chunk = bytes.slice(offset, offset + (length - 4))
    return new CopyDataMessage(length, chunk)
  }

  private parseCopyInMessage(offset: number, length: number, bytes: Buffer) {
    return this.parseCopyMessage(offset, length, bytes, 'copyInResponse')
  }

  private parseCopyOutMessage(offset: number, length: number, bytes: Buffer) {
    return this.parseCopyMessage(offset, length, bytes, 'copyOutResponse')
  }

  private parseCopyMessage(offset: number, length: number, bytes: Buffer, messageName: MessageName) {
    this.reader.setBuffer(offset, bytes)
    const isBinary = this.reader.byte() !== 0
    const columnCount = this.reader.int16()
    const message = new CopyResponse(length, messageName, isBinary, columnCount)
    for (let i = 0; i < columnCount; i++) {
      message.columnTypes[i] = this.reader.int16()
    }
    return message
  }

  private parseNotificationMessage(offset: number, length: number, bytes: Buffer) {
    this.reader.setBuffer(offset, bytes)
    const processId = this.reader.int32()
    const channel = this.reader.cstring()
    const payload = this.reader.cstring()
    return new NotificationResponseMessage(length, processId, channel, payload)
  }

  private parseRowDescriptionMessage(offset: number, length: number, bytes: Buffer) {
    this.reader.setBuffer(offset, bytes)
    const fieldCount = this.reader.int16()
    const message = new RowDescriptionMessage(length, fieldCount)
    for (let i = 0; i < fieldCount; i++) {
      message.fields[i] = this.parseField()
    }
    return message
  }

  private parseField(): Field {
    const name = this.reader.cstring()
    const tableID = this.reader.int32()
    const columnID = this.reader.int16()
    const dataTypeID = this.reader.int32()
    const dataTypeSize = this.reader.int16()
    const dataTypeModifier = this.reader.int32()
    const mode = this.reader.int16() === 0 ? 'text' : 'binary'
    return new Field(name, tableID, columnID, dataTypeID, dataTypeSize, dataTypeModifier, mode)
  }

  private parseParameterDescriptionMessage(offset: number, length: number, bytes: Buffer) {
    this.reader.setBuffer(offset, bytes)
    const parameterCount = this.reader.int16()
    const message = new ParameterDescriptionMessage(length, parameterCount)
    for (let i = 0; i < parameterCount; i++) {
      message.dataTypeIDs[i] = this.reader.int32()
    }
    return message
  }

  private parseDataRowMessage(offset: number, length: number, bytes: Buffer) {
    this.reader.setBuffer(offset, bytes)
    const fieldCount = this.reader.int16()
    const fields: any[] = new Array(fieldCount)
    for (let i = 0; i < fieldCount; i++) {
      const len = this.reader.int32()
      // a -1 for length means the value of the field is null
      fields[i] = len === -1 ? null : this.reader.string(len)
    }
    return new DataRowMessage(length, fields)
  }

  private parseParameterStatusMessage(offset: number, length: number, bytes: Buffer) {
    this.reader.setBuffer(offset, bytes)
    const name = this.reader.cstring()
    const value = this.reader.cstring()
    return new ParameterStatusMessage(length, name, value)
  }

  private parseBackendKeyData(offset: number, length: number, bytes: Buffer) {
    this.reader.setBuffer(offset, bytes)
    const processID = this.reader.int32()
    const secretKey = this.reader.int32()
    return new BackendKeyDataMessage(length, processID, secretKey)
  }

  public parseAuthenticationResponse(offset: number, length: number, bytes: Buffer) {
    this.reader.setBuffer(offset, bytes)
    const code = this.reader.int32()
    // TODO(bmc): maybe better types here
    const message: BackendMessage & any = {
      name: 'authenticationOk',
      length,
    }

    switch (code) {
      case 0: // AuthenticationOk
        break
      case 3: // AuthenticationCleartextPassword
        if (message.length === 8) {
          message.name = 'authenticationCleartextPassword'
        }
        break
      case 5: // AuthenticationMD5Password
        if (message.length === 12) {
          message.name = 'authenticationMD5Password'
          const salt = this.reader.bytes(4)
          return new AuthenticationMD5Password(length, salt)
        }
        break
      case 10: // AuthenticationSASL
        message.name = 'authenticationSASL'
        message.mechanisms = []
        let mechanism: string
        do {
          mechanism = this.reader.cstring()

          if (mechanism) {
            message.mechanisms.push(mechanism)
          }
        } while (mechanism)
        break
      case 11: // AuthenticationSASLContinue
        message.name = 'authenticationSASLContinue'
        message.data = this.reader.string(length - 8)
        break
      case 12: // AuthenticationSASLFinal
        message.name = 'authenticationSASLFinal'
        message.data = this.reader.string(length - 8)
        break
      default:
        throw new Error('Unknown authenticationOk message type ' + code)
    }
    return message
  }

  private parseErrorMessage(offset: number, length: number, bytes: Buffer, name: MessageName) {
    this.reader.setBuffer(offset, bytes)
    const fields: Record<string, string> = {}
    let fieldType = this.reader.string(1)
    while (fieldType !== '\0') {
      fields[fieldType] = this.reader.cstring()
      fieldType = this.reader.string(1)
    }

    const messageValue = fields.M

    const message =
      name === 'notice' ? new NoticeMessage(length, messageValue) : new DatabaseError(messageValue, length, name)

    message.severity = fields.S
    message.code = fields.C
    message.detail = fields.D
    message.hint = fields.H
    message.position = fields.P
    message.internalPosition = fields.p
    message.internalQuery = fields.q
    message.where = fields.W
    message.schema = fields.s
    message.table = fields.t
    message.column = fields.c
    message.dataType = fields.d
    message.constraint = fields.n
    message.file = fields.F
    message.line = fields.L
    message.routine = fields.R
    return message
  }
}
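Because `parse` only emits once a full frame (one code byte plus a self-inclusive int32 length) is buffered, the same `Parser` instance can be fed arbitrary chunk boundaries. A small sketch, assuming this package's relative import paths:

```ts
import { Parser } from './parser'
import { BackendMessage } from './messages'

const parser = new Parser()
const seen: BackendMessage[] = []
// ReadyForQuery: 'Z' (0x5a), length 5, status 'I' (0x49)
const frame = Buffer.from([0x5a, 0x00, 0x00, 0x00, 0x05, 0x49])
parser.parse(frame.slice(0, 3), (m) => seen.push(m)) // partial header: nothing emitted
parser.parse(frame.slice(3), (m) => seen.push(m)) // completes the frame
console.log(seen[0].name) // 'readyForQuery'
```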

274 lib/node/node_modules/pg-protocol/src/serializer.ts generated vendored Normal file
@@ -0,0 +1,274 @@
import { Writer } from './buffer-writer'

const enum code {
  startup = 0x70,
  query = 0x51,
  parse = 0x50,
  bind = 0x42,
  execute = 0x45,
  flush = 0x48,
  sync = 0x53,
  end = 0x58,
  close = 0x43,
  describe = 0x44,
  copyFromChunk = 0x64,
  copyDone = 0x63,
  copyFail = 0x66,
}

const writer = new Writer()

const startup = (opts: Record<string, string>): Buffer => {
  // protocol version
  writer.addInt16(3).addInt16(0)
  for (const key of Object.keys(opts)) {
    writer.addCString(key).addCString(opts[key])
  }

  writer.addCString('client_encoding').addCString('UTF8')

  var bodyBuffer = writer.addCString('').flush()
  // this message is sent without a code

  var length = bodyBuffer.length + 4

  return new Writer().addInt32(length).add(bodyBuffer).flush()
}

const requestSsl = (): Buffer => {
  const response = Buffer.allocUnsafe(8)
  response.writeInt32BE(8, 0)
  response.writeInt32BE(80877103, 4)
  return response
}

const password = (password: string): Buffer => {
  return writer.addCString(password).flush(code.startup)
}

const sendSASLInitialResponseMessage = function (mechanism: string, initialResponse: string): Buffer {
  // 0x70 = 'p'
  writer.addCString(mechanism).addInt32(Buffer.byteLength(initialResponse)).addString(initialResponse)

  return writer.flush(code.startup)
}

const sendSCRAMClientFinalMessage = function (additionalData: string): Buffer {
  return writer.addString(additionalData).flush(code.startup)
}

const query = (text: string): Buffer => {
  return writer.addCString(text).flush(code.query)
}

type ParseOpts = {
  name?: string
  types?: number[]
  text: string
}

const emptyArray: any[] = []

const parse = (query: ParseOpts): Buffer => {
  // expect something like this:
  // { name: 'queryName',
  //   text: 'select * from blah',
  //   types: ['int8', 'bool'] }

  // normalize missing query names to allow for null
  const name = query.name || ''
  if (name.length > 63) {
    /* eslint-disable no-console */
    console.error('Warning! Postgres only supports 63 characters for query names.')
    console.error('You supplied %s (%s)', name, name.length)
    console.error('This can cause conflicts and silent errors executing queries')
    /* eslint-enable no-console */
  }

  const types = query.types || emptyArray

  var len = types.length

  var buffer = writer
    .addCString(name) // name of query
    .addCString(query.text) // actual query text
    .addInt16(len)

  for (var i = 0; i < len; i++) {
    buffer.addInt32(types[i])
  }

  return writer.flush(code.parse)
}

type ValueMapper = (param: any, index: number) => any

type BindOpts = {
  portal?: string
  binary?: boolean
  statement?: string
  values?: any[]
  // optional map from JS value to postgres value per parameter
  valueMapper?: ValueMapper
}

const paramWriter = new Writer()

// make this a const enum so typescript will inline the value
const enum ParamType {
  STRING = 0,
  BINARY = 1,
}

const writeValues = function (values: any[], valueMapper?: ValueMapper): void {
  for (let i = 0; i < values.length; i++) {
    const mappedVal = valueMapper ? valueMapper(values[i], i) : values[i]
    if (mappedVal == null) {
      // add the param type (string) to the writer
      writer.addInt16(ParamType.STRING)
      // write -1 to the param writer to indicate null
      paramWriter.addInt32(-1)
    } else if (mappedVal instanceof Buffer) {
      // add the param type (binary) to the writer
      writer.addInt16(ParamType.BINARY)
      // add the buffer to the param writer
      paramWriter.addInt32(mappedVal.length)
      paramWriter.add(mappedVal)
    } else {
      // add the param type (string) to the writer
      writer.addInt16(ParamType.STRING)
      paramWriter.addInt32(Buffer.byteLength(mappedVal))
      paramWriter.addString(mappedVal)
    }
  }
}

const bind = (config: BindOpts = {}): Buffer => {
  // normalize config
  const portal = config.portal || ''
  const statement = config.statement || ''
  const binary = config.binary || false
  const values = config.values || emptyArray
  const len = values.length

  writer.addCString(portal).addCString(statement)
  writer.addInt16(len)

  writeValues(values, config.valueMapper)

  writer.addInt16(len)
  writer.add(paramWriter.flush())

  // format code
  writer.addInt16(binary ? ParamType.BINARY : ParamType.STRING)
  return writer.flush(code.bind)
}

type ExecOpts = {
  portal?: string
  rows?: number
}

const emptyExecute = Buffer.from([code.execute, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x00, 0x00])

const execute = (config?: ExecOpts): Buffer => {
  // this is the happy path for most queries
  if (!config || (!config.portal && !config.rows)) {
    return emptyExecute
  }

  const portal = config.portal || ''
  const rows = config.rows || 0

  const portalLength = Buffer.byteLength(portal)
  const len = 4 + portalLength + 1 + 4
  // one extra byte for the code
  const buff = Buffer.allocUnsafe(1 + len)
  buff[0] = code.execute
  buff.writeInt32BE(len, 1)
  buff.write(portal, 5, 'utf-8')
  buff[portalLength + 5] = 0 // null terminate portal cString
  buff.writeUInt32BE(rows, buff.length - 4)
  return buff
}

const cancel = (processID: number, secretKey: number): Buffer => {
  const buffer = Buffer.allocUnsafe(16)
  buffer.writeInt32BE(16, 0)
  buffer.writeInt16BE(1234, 4)
  buffer.writeInt16BE(5678, 6)
  buffer.writeInt32BE(processID, 8)
  buffer.writeInt32BE(secretKey, 12)
  return buffer
}

type PortalOpts = {
  type: 'S' | 'P'
  name?: string
}

const cstringMessage = (code: code, string: string): Buffer => {
  const stringLen = Buffer.byteLength(string)
  const len = 4 + stringLen + 1
  // one extra byte for the code
  const buffer = Buffer.allocUnsafe(1 + len)
  buffer[0] = code
  buffer.writeInt32BE(len, 1)
  buffer.write(string, 5, 'utf-8')
  buffer[len] = 0 // null terminate cString
  return buffer
}

const emptyDescribePortal = writer.addCString('P').flush(code.describe)
const emptyDescribeStatement = writer.addCString('S').flush(code.describe)

const describe = (msg: PortalOpts): Buffer => {
  return msg.name
    ? cstringMessage(code.describe, `${msg.type}${msg.name || ''}`)
    : msg.type === 'P'
      ? emptyDescribePortal
      : emptyDescribeStatement
}

const close = (msg: PortalOpts): Buffer => {
  const text = `${msg.type}${msg.name || ''}`
  return cstringMessage(code.close, text)
}

const copyData = (chunk: Buffer): Buffer => {
  return writer.add(chunk).flush(code.copyFromChunk)
}

const copyFail = (message: string): Buffer => {
  return cstringMessage(code.copyFail, message)
}

const codeOnlyBuffer = (code: code): Buffer => Buffer.from([code, 0x00, 0x00, 0x00, 0x04])

const flushBuffer = codeOnlyBuffer(code.flush)
const syncBuffer = codeOnlyBuffer(code.sync)
const endBuffer = codeOnlyBuffer(code.end)
const copyDoneBuffer = codeOnlyBuffer(code.copyDone)

const serialize = {
  startup,
  password,
  requestSsl,
  sendSASLInitialResponseMessage,
  sendSCRAMClientFinalMessage,
  query,
  parse,
  bind,
  execute,
  describe,
  close,
  flush: () => flushBuffer,
  sync: () => syncBuffer,
  end: () => endBuffer,
  copyData,
  copyDone: () => copyDoneBuffer,
  copyFail,
  cancel,
}

export { serialize }
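Every helper on `serialize` returns a complete frontend frame, ready to hand to `socket.write()`. A short sketch of the typical startup sequence (the credentials and ordering comments are illustrative, not from the diff):

```ts
import { serialize } from 'pg-protocol'

const startup = serialize.startup({ user: 'postgres', database: 'postgres' })
const query = serialize.query('select now()')
const bye = serialize.end()
// socket.write(startup); then, after authentication completes:
// socket.write(query); socket.write(bye)
```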

75 lib/node/node_modules/pg-protocol/src/testing/buffer-list.ts generated vendored Normal file
@@ -0,0 +1,75 @@
export default class BufferList {
  constructor(public buffers: Buffer[] = []) {}

  public add(buffer: Buffer, front?: boolean) {
    this.buffers[front ? 'unshift' : 'push'](buffer)
    return this
  }

  public addInt16(val: number, front?: boolean) {
    return this.add(Buffer.from([val >>> 8, val >>> 0]), front)
  }

  public getByteLength(initial?: number) {
    return this.buffers.reduce(function (previous, current) {
      return previous + current.length
    }, initial || 0)
  }

  public addInt32(val: number, first?: boolean) {
    return this.add(
      Buffer.from([(val >>> 24) & 0xff, (val >>> 16) & 0xff, (val >>> 8) & 0xff, (val >>> 0) & 0xff]),
      first
    )
  }

  public addCString(val: string, front?: boolean) {
    var len = Buffer.byteLength(val)
    var buffer = Buffer.alloc(len + 1)
    buffer.write(val)
    buffer[len] = 0
    return this.add(buffer, front)
  }

  public addString(val: string, front?: boolean) {
    var len = Buffer.byteLength(val)
    var buffer = Buffer.alloc(len)
    buffer.write(val)
    return this.add(buffer, front)
  }

  public addChar(char: string, first?: boolean) {
    return this.add(Buffer.from(char, 'utf8'), first)
  }

  public addByte(byte: number) {
    return this.add(Buffer.from([byte]))
  }

  public join(appendLength?: boolean, char?: string): Buffer {
    var length = this.getByteLength()
    if (appendLength) {
      this.addInt32(length + 4, true)
      return this.join(false, char)
    }
    if (char) {
      this.addChar(char, true)
      length++
    }
    var result = Buffer.alloc(length)
    var index = 0
    this.buffers.forEach(function (buffer) {
      buffer.copy(result, index, 0)
      index += buffer.length
    })
    return result
  }

  public static concat(): Buffer {
    var total = new BufferList()
    for (var i = 0; i < arguments.length; i++) {
      total.add(arguments[i])
    }
    return total.join()
  }
}

166 lib/node/node_modules/pg-protocol/src/testing/test-buffers.ts generated vendored Normal file
@@ -0,0 +1,166 @@
// https://www.postgresql.org/docs/current/protocol-message-formats.html
import BufferList from './buffer-list'

const buffers = {
  readyForQuery: function () {
    return new BufferList().add(Buffer.from('I')).join(true, 'Z')
  },

  authenticationOk: function () {
    return new BufferList().addInt32(0).join(true, 'R')
  },

  authenticationCleartextPassword: function () {
    return new BufferList().addInt32(3).join(true, 'R')
  },

  authenticationMD5Password: function () {
    return new BufferList()
      .addInt32(5)
      .add(Buffer.from([1, 2, 3, 4]))
      .join(true, 'R')
  },

  authenticationSASL: function () {
    return new BufferList().addInt32(10).addCString('SCRAM-SHA-256').addCString('').join(true, 'R')
  },

  authenticationSASLContinue: function () {
    return new BufferList().addInt32(11).addString('data').join(true, 'R')
  },

  authenticationSASLFinal: function () {
    return new BufferList().addInt32(12).addString('data').join(true, 'R')
  },

  parameterStatus: function (name: string, value: string) {
    return new BufferList().addCString(name).addCString(value).join(true, 'S')
  },

  backendKeyData: function (processID: number, secretKey: number) {
    return new BufferList().addInt32(processID).addInt32(secretKey).join(true, 'K')
  },

  commandComplete: function (string: string) {
    return new BufferList().addCString(string).join(true, 'C')
  },

  rowDescription: function (fields: any[]) {
    fields = fields || []
    var buf = new BufferList()
    buf.addInt16(fields.length)
    fields.forEach(function (field) {
      buf
        .addCString(field.name)
        .addInt32(field.tableID || 0)
        .addInt16(field.attributeNumber || 0)
        .addInt32(field.dataTypeID || 0)
        .addInt16(field.dataTypeSize || 0)
        .addInt32(field.typeModifier || 0)
        .addInt16(field.formatCode || 0)
    })
    return buf.join(true, 'T')
  },

  parameterDescription: function (dataTypeIDs: number[]) {
    dataTypeIDs = dataTypeIDs || []
    var buf = new BufferList()
    buf.addInt16(dataTypeIDs.length)
    dataTypeIDs.forEach(function (dataTypeID) {
      buf.addInt32(dataTypeID)
    })
    return buf.join(true, 't')
  },

  dataRow: function (columns: any[]) {
    columns = columns || []
    var buf = new BufferList()
    buf.addInt16(columns.length)
    columns.forEach(function (col) {
      if (col == null) {
        buf.addInt32(-1)
      } else {
        var strBuf = Buffer.from(col, 'utf8')
        buf.addInt32(strBuf.length)
        buf.add(strBuf)
      }
    })
    return buf.join(true, 'D')
  },

  error: function (fields: any) {
    return buffers.errorOrNotice(fields).join(true, 'E')
  },

  notice: function (fields: any) {
    return buffers.errorOrNotice(fields).join(true, 'N')
  },

  errorOrNotice: function (fields: any) {
    fields = fields || []
    var buf = new BufferList()
    fields.forEach(function (field: any) {
      buf.addChar(field.type)
      buf.addCString(field.value)
    })
    return buf.add(Buffer.from([0])) // terminator
  },

  parseComplete: function () {
    return new BufferList().join(true, '1')
  },

  bindComplete: function () {
    return new BufferList().join(true, '2')
  },

  notification: function (id: number, channel: string, payload: string) {
    return new BufferList().addInt32(id).addCString(channel).addCString(payload).join(true, 'A')
  },

  emptyQuery: function () {
    return new BufferList().join(true, 'I')
  },

  portalSuspended: function () {
    return new BufferList().join(true, 's')
  },

  closeComplete: function () {
    return new BufferList().join(true, '3')
  },

  copyIn: function (cols: number) {
    const list = new BufferList()
      // text mode
      .addByte(0)
      // column count
      .addInt16(cols)
    for (let i = 0; i < cols; i++) {
      list.addInt16(i)
    }
    return list.join(true, 'G')
  },

  copyOut: function (cols: number) {
    const list = new BufferList()
      // text mode
      .addByte(0)
      // column count
      .addInt16(cols)
    for (let i = 0; i < cols; i++) {
      list.addInt16(i)
    }
    return list.join(true, 'H')
  },

  copyData: function (bytes: Buffer) {
    return new BufferList().add(bytes).join(true, 'd')
  },

  copyDone: function () {
    return new BufferList().join(true, 'c')
  },
}

export default buffers

1 lib/node/node_modules/pg-protocol/src/types/chunky.d.ts generated vendored Normal file
@@ -0,0 +1 @@
declare module 'chunky'

7 lib/node/node_modules/pg-types/.travis.yml generated vendored Normal file
@@ -0,0 +1,7 @@
language: node_js
node_js:
  - '4'
  - 'lts/*'
  - 'node'
env:
  - PGUSER=postgres

14 lib/node/node_modules/pg-types/Makefile generated vendored Normal file
@@ -0,0 +1,14 @@
.PHONY: publish-patch test

test:
	npm test

patch: test
	npm version patch -m "Bump version"
	git push origin master --tags
	npm publish

minor: test
	npm version minor -m "Bump version"
	git push origin master --tags
	npm publish

75 lib/node/node_modules/pg-types/README.md generated vendored Normal file
@@ -0,0 +1,75 @@
# pg-types
|
||||
|
||||
This is the code that turns all the raw text from postgres into JavaScript types for [node-postgres](https://github.com/brianc/node-postgres.git)
|
||||
|
||||
## use
|
||||
|
||||
This module is consumed and exported from the root `pg` object of node-postgres. To access it, do the following:
|
||||
|
||||
```js
|
||||
var types = require('pg').types
|
||||
```
|
||||
|
||||
Generally what you'll want to do is override how a specific data-type is parsed and turned into a JavaScript type. By default the PostgreSQL backend server returns everything as strings. Every data type corresponds to a unique `OID` within the server, and these `OIDs` are sent back with the query response. So, you need to match a particluar `OID` to a function you'd like to use to take the raw text input and produce a valid JavaScript object as a result. `null` values are never parsed.
|
||||
|
||||
Let's do something I commonly like to do on projects: return 64-bit integers `(int8)` as JavaScript integers. Because JavaScript doesn't have support for 64-bit integers node-postgres cannot confidently parse `int8` data type results as numbers because if you have a _huge_ number it will overflow and the result you'd get back from node-postgres would not be the result in the datbase. That would be a __very bad thing__ so node-postgres just returns `int8` results as strings and leaves the parsing up to you. Let's say that you know you don't and wont ever have numbers greater than `int4` in your database, but you're tired of recieving results from the `COUNT(*)` function as strings (because that function returns `int8`). You would do this:
|
||||
|
||||
```js
|
||||
var types = require('pg').types
|
||||
types.setTypeParser(20, function(val) {
|
||||
return parseInt(val)
|
||||
})
|
||||
```
|
||||
|
||||
__boom__: now you get numbers instead of strings.
|
||||
|
||||
Just as another example -- not saying this is a good idea -- let's say you want to return all dates from your database as [moment](http://momentjs.com/docs/) objects. Okay, do this:
|
||||
|
||||
```js
|
||||
var types = require('pg').types
|
||||
var moment = require('moment')
|
||||
var parseFn = function(val) {
|
||||
return val === null ? null : moment(val)
|
||||
}
|
||||
types.setTypeParser(types.builtins.TIMESTAMPTZ, parseFn)
|
||||
types.setTypeParser(types.builtins.TIMESTAMP, parseFn)
|
||||
```
|
||||
_note: I've never done that with my dates, and I'm not 100% sure moment can parse all the date strings returned from postgres. It's just an example!_
|
||||
|
||||
If you're thinking "gee, this seems pretty handy, but how can I get a list of all the OIDs in the database and what they correspond to?!?!?!" worry not:
|
||||
|
||||
```bash
|
||||
$ psql -c "select typname, oid, typarray from pg_type order by oid"
|
||||
```
|
||||
|
||||
If you want to find out the OID of a specific type:
|
||||
|
||||
```bash
|
||||
$ psql -c "select typname, oid, typarray from pg_type where typname = 'daterange' order by oid"
|
||||
```
|
||||
|
||||
:smile:
|
||||
|
||||
## license

The MIT License (MIT)

Copyright (c) 2014 Brian M. Carlson

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
137
lib/node/node_modules/pg-types/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,137 @@
export enum TypeId {
  BOOL = 16,
  BYTEA = 17,
  CHAR = 18,
  INT8 = 20,
  INT2 = 21,
  INT4 = 23,
  REGPROC = 24,
  TEXT = 25,
  OID = 26,
  TID = 27,
  XID = 28,
  CID = 29,
  JSON = 114,
  XML = 142,
  PG_NODE_TREE = 194,
  SMGR = 210,
  PATH = 602,
  POLYGON = 604,
  CIDR = 650,
  FLOAT4 = 700,
  FLOAT8 = 701,
  ABSTIME = 702,
  RELTIME = 703,
  TINTERVAL = 704,
  CIRCLE = 718,
  MACADDR8 = 774,
  MONEY = 790,
  MACADDR = 829,
  INET = 869,
  ACLITEM = 1033,
  BPCHAR = 1042,
  VARCHAR = 1043,
  DATE = 1082,
  TIME = 1083,
  TIMESTAMP = 1114,
  TIMESTAMPTZ = 1184,
  INTERVAL = 1186,
  TIMETZ = 1266,
  BIT = 1560,
  VARBIT = 1562,
  NUMERIC = 1700,
  REFCURSOR = 1790,
  REGPROCEDURE = 2202,
  REGOPER = 2203,
  REGOPERATOR = 2204,
  REGCLASS = 2205,
  REGTYPE = 2206,
  UUID = 2950,
  TXID_SNAPSHOT = 2970,
  PG_LSN = 3220,
  PG_NDISTINCT = 3361,
  PG_DEPENDENCIES = 3402,
  TSVECTOR = 3614,
  TSQUERY = 3615,
  GTSVECTOR = 3642,
  REGCONFIG = 3734,
  REGDICTIONARY = 3769,
  JSONB = 3802,
  REGNAMESPACE = 4089,
  REGROLE = 4096
}

export type builtinsTypes =
  'BOOL' |
  'BYTEA' |
  'CHAR' |
  'INT8' |
  'INT2' |
  'INT4' |
  'REGPROC' |
  'TEXT' |
  'OID' |
  'TID' |
  'XID' |
  'CID' |
  'JSON' |
  'XML' |
  'PG_NODE_TREE' |
  'SMGR' |
  'PATH' |
  'POLYGON' |
  'CIDR' |
  'FLOAT4' |
  'FLOAT8' |
  'ABSTIME' |
  'RELTIME' |
  'TINTERVAL' |
  'CIRCLE' |
  'MACADDR8' |
  'MONEY' |
  'MACADDR' |
  'INET' |
  'ACLITEM' |
  'BPCHAR' |
  'VARCHAR' |
  'DATE' |
  'TIME' |
  'TIMESTAMP' |
  'TIMESTAMPTZ' |
  'INTERVAL' |
  'TIMETZ' |
  'BIT' |
  'VARBIT' |
  'NUMERIC' |
  'REFCURSOR' |
  'REGPROCEDURE' |
  'REGOPER' |
  'REGOPERATOR' |
  'REGCLASS' |
  'REGTYPE' |
  'UUID' |
  'TXID_SNAPSHOT' |
  'PG_LSN' |
  'PG_NDISTINCT' |
  'PG_DEPENDENCIES' |
  'TSVECTOR' |
  'TSQUERY' |
  'GTSVECTOR' |
  'REGCONFIG' |
  'REGDICTIONARY' |
  'JSONB' |
  'REGNAMESPACE' |
  'REGROLE';

export type TypesBuiltins = {[key in builtinsTypes]: TypeId};

export type TypeFormat = 'text' | 'binary';

export const builtins: TypesBuiltins;

export function setTypeParser (id: TypeId, parseFn: ((value: string) => any)): void;
export function setTypeParser (id: TypeId, format: TypeFormat, parseFn: (value: string) => any): void;

export const getTypeParser: (id: TypeId, format?: TypeFormat) => any

export const arrayParser: (source: string, transform: (entry: any) => any) => any[];
47
lib/node/node_modules/pg-types/index.js
generated
vendored
Normal file
@@ -0,0 +1,47 @@
var textParsers = require('./lib/textParsers');
var binaryParsers = require('./lib/binaryParsers');
var arrayParser = require('./lib/arrayParser');
var builtinTypes = require('./lib/builtins');

exports.getTypeParser = getTypeParser;
exports.setTypeParser = setTypeParser;
exports.arrayParser = arrayParser;
exports.builtins = builtinTypes;

var typeParsers = {
  text: {},
  binary: {}
};

// the empty parse function
function noParse (val) {
  return String(val);
};

// returns a function used to convert a specific type (specified by
// oid) into a result javascript type
// note: the oid can be obtained via the following sql query:
// SELECT oid FROM pg_type WHERE typname = 'TYPE_NAME_HERE';
function getTypeParser (oid, format) {
  format = format || 'text';
  if (!typeParsers[format]) {
    return noParse;
  }
  return typeParsers[format][oid] || noParse;
};

function setTypeParser (oid, format, parseFn) {
  if (typeof format == 'function') {
    parseFn = format;
    format = 'text';
  }
  typeParsers[format][oid] = parseFn;
};

textParsers.init(function(oid, converter) {
  typeParsers.text[oid] = converter;
});

binaryParsers.init(function(oid, converter) {
  typeParsers.binary[oid] = converter;
});
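A hedged usage sketch of mine (not part of the package) showing the registry above in action: unregistered OID/format combinations fall through to `noParse`, which hands back the raw string.

```js
var types = require('pg-types')

// int4 (OID 23) has a registered text parser, so strings become numbers.
var parseInt4 = types.getTypeParser(types.builtins.INT4, 'text')
console.log(parseInt4('42')) // 42

// An OID with no registered parser falls back to noParse.
var unknown = types.getTypeParser(999999, 'text')
console.log(unknown('as-is')) // 'as-is'
```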
21
lib/node/node_modules/pg-types/index.test-d.ts
generated
vendored
Normal file
@@ -0,0 +1,21 @@
import * as types from '.';
import { expectType } from 'tsd';

// builtins
expectType<types.TypesBuiltins>(types.builtins);

// getTypeParser
const noParse = types.getTypeParser(types.builtins.NUMERIC, 'text');
const numericParser = types.getTypeParser(types.builtins.NUMERIC, 'binary');
expectType<string>(noParse('noParse'));
expectType<number>(numericParser([200, 1, 0, 15]));

// getArrayParser
const value = types.arrayParser('{1,2,3}', (num) => parseInt(num));
expectType<number[]>(value);

// setTypeParser
types.setTypeParser(types.builtins.INT8, parseInt);
types.setTypeParser(types.builtins.FLOAT8, parseFloat);
types.setTypeParser(types.builtins.FLOAT8, 'binary', (data) => data[0]);
types.setTypeParser(types.builtins.FLOAT8, 'text', parseFloat);
11
lib/node/node_modules/pg-types/lib/arrayParser.js
generated
vendored
Normal file
@@ -0,0 +1,11 @@
var array = require('postgres-array');

module.exports = {
  create: function (source, transform) {
    return {
      parse: function() {
        return array.parse(source, transform);
      }
    };
  }
};
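A quick usage sketch of mine (not from the package): `create` wraps a source string and a per-element transform, and `parse()` delegates to `postgres-array`.

```js
var arrayParser = require('pg-types/lib/arrayParser')

// Parse a Postgres array literal, converting each element to a number.
var result = arrayParser.create('{1,2,3}', function (entry) {
  return entry === null ? null : parseInt(entry, 10)
}).parse()

console.log(result) // [1, 2, 3]
```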
257
lib/node/node_modules/pg-types/lib/binaryParsers.js
generated
vendored
Normal file
@@ -0,0 +1,257 @@
var parseInt64 = require('pg-int8');

var parseBits = function(data, bits, offset, invert, callback) {
  offset = offset || 0;
  invert = invert || false;
  callback = callback || function(lastValue, newValue, bits) { return (lastValue * Math.pow(2, bits)) + newValue; };
  var offsetBytes = offset >> 3;

  var inv = function(value) {
    if (invert) {
      return ~value & 0xff;
    }

    return value;
  };

  // read first (maybe partial) byte
  var mask = 0xff;
  var firstBits = 8 - (offset % 8);
  if (bits < firstBits) {
    mask = (0xff << (8 - bits)) & 0xff;
    firstBits = bits;
  }

  if (offset) {
    mask = mask >> (offset % 8);
  }

  var result = 0;
  if ((offset % 8) + bits >= 8) {
    result = callback(0, inv(data[offsetBytes]) & mask, firstBits);
  }

  // read bytes
  var bytes = (bits + offset) >> 3;
  for (var i = offsetBytes + 1; i < bytes; i++) {
    result = callback(result, inv(data[i]), 8);
  }

  // bits to read, that are not a complete byte
  var lastBits = (bits + offset) % 8;
  if (lastBits > 0) {
    result = callback(result, inv(data[bytes]) >> (8 - lastBits), lastBits);
  }

  return result;
};

var parseFloatFromBits = function(data, precisionBits, exponentBits) {
  var bias = Math.pow(2, exponentBits - 1) - 1;
  var sign = parseBits(data, 1);
  var exponent = parseBits(data, exponentBits, 1);

  if (exponent === 0) {
    return 0;
  }

  // parse mantissa
  var precisionBitsCounter = 1;
  var parsePrecisionBits = function(lastValue, newValue, bits) {
    if (lastValue === 0) {
      lastValue = 1;
    }

    for (var i = 1; i <= bits; i++) {
      precisionBitsCounter /= 2;
      if ((newValue & (0x1 << (bits - i))) > 0) {
        lastValue += precisionBitsCounter;
      }
    }

    return lastValue;
  };

  var mantissa = parseBits(data, precisionBits, exponentBits + 1, false, parsePrecisionBits);

  // special cases
  if (exponent == (Math.pow(2, exponentBits + 1) - 1)) {
    if (mantissa === 0) {
      return (sign === 0) ? Infinity : -Infinity;
    }

    return NaN;
  }

  // normal number
  return ((sign === 0) ? 1 : -1) * Math.pow(2, exponent - bias) * mantissa;
};

var parseInt16 = function(value) {
  if (parseBits(value, 1) == 1) {
    return -1 * (parseBits(value, 15, 1, true) + 1);
  }

  return parseBits(value, 15, 1);
};

var parseInt32 = function(value) {
  if (parseBits(value, 1) == 1) {
    return -1 * (parseBits(value, 31, 1, true) + 1);
  }

  return parseBits(value, 31, 1);
};

var parseFloat32 = function(value) {
  return parseFloatFromBits(value, 23, 8);
};

var parseFloat64 = function(value) {
  return parseFloatFromBits(value, 52, 11);
};

var parseNumeric = function(value) {
  var sign = parseBits(value, 16, 32);
  if (sign == 0xc000) {
    return NaN;
  }

  var weight = Math.pow(10000, parseBits(value, 16, 16));
  var result = 0;

  var digits = [];
  var ndigits = parseBits(value, 16);
  for (var i = 0; i < ndigits; i++) {
    result += parseBits(value, 16, 64 + (16 * i)) * weight;
    weight /= 10000;
  }

  var scale = Math.pow(10, parseBits(value, 16, 48));
  return ((sign === 0) ? 1 : -1) * Math.round(result * scale) / scale;
};

var parseDate = function(isUTC, value) {
  var sign = parseBits(value, 1);
  var rawValue = parseBits(value, 63, 1);

  // discard usecs and shift from 2000 to 1970
  var result = new Date((((sign === 0) ? 1 : -1) * rawValue / 1000) + 946684800000);

  if (!isUTC) {
    result.setTime(result.getTime() + result.getTimezoneOffset() * 60000);
  }

  // add microseconds to the date
  result.usec = rawValue % 1000;
  result.getMicroSeconds = function() {
    return this.usec;
  };
  result.setMicroSeconds = function(value) {
    this.usec = value;
  };
  result.getUTCMicroSeconds = function() {
    return this.usec;
  };

  return result;
};
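// Hedged aside, not part of the original file: the constant 946684800000 ms
// used in parseDate above is the 2000-to-1970 epoch shift, since
//   new Date(946684800000).toISOString() === '2000-01-01T00:00:00.000Z'
// and PostgreSQL binary timestamps count microseconds from 2000-01-01.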
var parseArray = function(value) {
  var dim = parseBits(value, 32);

  var flags = parseBits(value, 32, 32);
  var elementType = parseBits(value, 32, 64);

  var offset = 96;
  var dims = [];
  for (var i = 0; i < dim; i++) {
    // parse dimension
    dims[i] = parseBits(value, 32, offset);
    offset += 32;

    // ignore lower bounds
    offset += 32;
  }

  var parseElement = function(elementType) {
    // parse content length
    var length = parseBits(value, 32, offset);
    offset += 32;

    // parse null values
    if (length == 0xffffffff) {
      return null;
    }

    var result;
    if ((elementType == 0x17) || (elementType == 0x14)) {
      // int/bigint
      result = parseBits(value, length * 8, offset);
      offset += length * 8;
      return result;
    }
    else if (elementType == 0x19) {
      // string
      result = value.toString(this.encoding, offset >> 3, (offset += (length << 3)) >> 3);
      return result;
    }
    else {
      console.log("ERROR: ElementType not implemented: " + elementType);
    }
  };

  var parse = function(dimension, elementType) {
    var array = [];
    var i;

    if (dimension.length > 1) {
      var count = dimension.shift();
      for (i = 0; i < count; i++) {
        array[i] = parse(dimension, elementType);
      }
      dimension.unshift(count);
    }
    else {
      for (i = 0; i < dimension[0]; i++) {
        array[i] = parseElement(elementType);
      }
    }

    return array;
  };

  return parse(dims, elementType);
};

var parseText = function(value) {
  return value.toString('utf8');
};

var parseBool = function(value) {
  if (value === null) return null;
  return (parseBits(value, 8) > 0);
};

var init = function(register) {
  register(20, parseInt64);
  register(21, parseInt16);
  register(23, parseInt32);
  register(26, parseInt32);
  register(1700, parseNumeric);
  register(700, parseFloat32);
  register(701, parseFloat64);
  register(16, parseBool);
  register(1114, parseDate.bind(null, false));
  register(1184, parseDate.bind(null, true));
  register(1000, parseArray);
  register(1007, parseArray);
  register(1016, parseArray);
  register(1008, parseArray);
  register(1009, parseArray);
  register(25, parseText);
};

module.exports = {
  init: init
};
73
lib/node/node_modules/pg-types/lib/builtins.js
generated
vendored
Normal file
@@ -0,0 +1,73 @@
/**
 * The following query was used to generate this file:

 SELECT json_object_agg(UPPER(PT.typname), PT.oid::int4 ORDER BY pt.oid)
 FROM pg_type PT
 WHERE typnamespace = (SELECT pgn.oid FROM pg_namespace pgn WHERE nspname = 'pg_catalog') -- Take only builtin Postgres types with stable OID (extension types are not guaranteed to be stable)
 AND typtype = 'b' -- Only basic types
 AND typelem = 0 -- Ignore aliases
 AND typisdefined -- Ignore undefined types
 */

module.exports = {
  BOOL: 16,
  BYTEA: 17,
  CHAR: 18,
  INT8: 20,
  INT2: 21,
  INT4: 23,
  REGPROC: 24,
  TEXT: 25,
  OID: 26,
  TID: 27,
  XID: 28,
  CID: 29,
  JSON: 114,
  XML: 142,
  PG_NODE_TREE: 194,
  SMGR: 210,
  PATH: 602,
  POLYGON: 604,
  CIDR: 650,
  FLOAT4: 700,
  FLOAT8: 701,
  ABSTIME: 702,
  RELTIME: 703,
  TINTERVAL: 704,
  CIRCLE: 718,
  MACADDR8: 774,
  MONEY: 790,
  MACADDR: 829,
  INET: 869,
  ACLITEM: 1033,
  BPCHAR: 1042,
  VARCHAR: 1043,
  DATE: 1082,
  TIME: 1083,
  TIMESTAMP: 1114,
  TIMESTAMPTZ: 1184,
  INTERVAL: 1186,
  TIMETZ: 1266,
  BIT: 1560,
  VARBIT: 1562,
  NUMERIC: 1700,
  REFCURSOR: 1790,
  REGPROCEDURE: 2202,
  REGOPER: 2203,
  REGOPERATOR: 2204,
  REGCLASS: 2205,
  REGTYPE: 2206,
  UUID: 2950,
  TXID_SNAPSHOT: 2970,
  PG_LSN: 3220,
  PG_NDISTINCT: 3361,
  PG_DEPENDENCIES: 3402,
  TSVECTOR: 3614,
  TSQUERY: 3615,
  GTSVECTOR: 3642,
  REGCONFIG: 3734,
  REGDICTIONARY: 3769,
  JSONB: 3802,
  REGNAMESPACE: 4089,
  REGROLE: 4096
};
215
lib/node/node_modules/pg-types/lib/textParsers.js
generated
vendored
Normal file
@@ -0,0 +1,215 @@
var array = require('postgres-array')
var arrayParser = require('./arrayParser');
var parseDate = require('postgres-date');
var parseInterval = require('postgres-interval');
var parseByteA = require('postgres-bytea');

function allowNull (fn) {
  return function nullAllowed (value) {
    if (value === null) return value
    return fn(value)
  }
}

function parseBool (value) {
  if (value === null) return value
  return value === 'TRUE' ||
    value === 't' ||
    value === 'true' ||
    value === 'y' ||
    value === 'yes' ||
    value === 'on' ||
    value === '1';
}

function parseBoolArray (value) {
  if (!value) return null
  return array.parse(value, parseBool)
}

function parseBaseTenInt (string) {
  return parseInt(string, 10)
}

function parseIntegerArray (value) {
  if (!value) return null
  return array.parse(value, allowNull(parseBaseTenInt))
}

function parseBigIntegerArray (value) {
  if (!value) return null
  return array.parse(value, allowNull(function (entry) {
    return parseBigInteger(entry).trim()
  }))
}

var parsePointArray = function(value) {
  if (!value) { return null; }
  var p = arrayParser.create(value, function(entry) {
    if (entry !== null) {
      entry = parsePoint(entry);
    }
    return entry;
  });

  return p.parse();
};

var parseFloatArray = function(value) {
  if (!value) { return null; }
  var p = arrayParser.create(value, function(entry) {
    if (entry !== null) {
      entry = parseFloat(entry);
    }
    return entry;
  });

  return p.parse();
};

var parseStringArray = function(value) {
  if (!value) { return null; }

  var p = arrayParser.create(value);
  return p.parse();
};

var parseDateArray = function(value) {
  if (!value) { return null; }

  var p = arrayParser.create(value, function(entry) {
    if (entry !== null) {
      entry = parseDate(entry);
    }
    return entry;
  });

  return p.parse();
};

var parseIntervalArray = function(value) {
  if (!value) { return null; }

  var p = arrayParser.create(value, function(entry) {
    if (entry !== null) {
      entry = parseInterval(entry);
    }
    return entry;
  });

  return p.parse();
};

var parseByteAArray = function(value) {
  if (!value) { return null; }

  return array.parse(value, allowNull(parseByteA));
};

var parseInteger = function(value) {
  return parseInt(value, 10);
};

var parseBigInteger = function(value) {
  var valStr = String(value);
  if (/^\d+$/.test(valStr)) { return valStr; }
  return value;
};

var parseJsonArray = function(value) {
  if (!value) { return null; }

  return array.parse(value, allowNull(JSON.parse));
};

var parsePoint = function(value) {
  if (value[0] !== '(') { return null; }

  value = value.substring(1, value.length - 1).split(',');

  return {
    x: parseFloat(value[0]),
    y: parseFloat(value[1])
  };
};

var parseCircle = function(value) {
  if (value[0] !== '<' && value[1] !== '(') { return null; }

  var point = '(';
  var radius = '';
  var pointParsed = false;
  for (var i = 2; i < value.length - 1; i++) {
    if (!pointParsed) {
      point += value[i];
    }

    if (value[i] === ')') {
      pointParsed = true;
      continue;
    } else if (!pointParsed) {
      continue;
    }

    if (value[i] === ',') {
      continue;
    }

    radius += value[i];
  }
  var result = parsePoint(point);
  result.radius = parseFloat(radius);

  return result;
};

var init = function(register) {
  register(20, parseBigInteger); // int8
  register(21, parseInteger); // int2
  register(23, parseInteger); // int4
  register(26, parseInteger); // oid
  register(700, parseFloat); // float4/real
  register(701, parseFloat); // float8/double
  register(16, parseBool);
  register(1082, parseDate); // date
  register(1114, parseDate); // timestamp without timezone
  register(1184, parseDate); // timestamp
  register(600, parsePoint); // point
  register(651, parseStringArray); // cidr[]
  register(718, parseCircle); // circle
  register(1000, parseBoolArray);
  register(1001, parseByteAArray);
  register(1005, parseIntegerArray); // _int2
  register(1007, parseIntegerArray); // _int4
  register(1028, parseIntegerArray); // oid[]
  register(1016, parseBigIntegerArray); // _int8
  register(1017, parsePointArray); // point[]
  register(1021, parseFloatArray); // _float4
  register(1022, parseFloatArray); // _float8
  register(1231, parseFloatArray); // _numeric
  register(1014, parseStringArray); // char[]
  register(1015, parseStringArray); // varchar[]
  register(1008, parseStringArray);
  register(1009, parseStringArray);
  register(1040, parseStringArray); // macaddr[]
  register(1041, parseStringArray); // inet[]
  register(1115, parseDateArray); // timestamp without time zone[]
  register(1182, parseDateArray); // _date
  register(1185, parseDateArray); // timestamp with time zone[]
  register(1186, parseInterval);
  register(1187, parseIntervalArray);
  register(17, parseByteA);
  register(114, JSON.parse.bind(JSON)); // json
  register(3802, JSON.parse.bind(JSON)); // jsonb
  register(199, parseJsonArray); // json[]
  register(3807, parseJsonArray); // jsonb[]
  register(3907, parseStringArray); // numrange[]
  register(2951, parseStringArray); // uuid[]
  register(791, parseStringArray); // money[]
  register(1183, parseStringArray); // time[]
  register(1270, parseStringArray); // timetz[]
};

module.exports = {
  init: init
};
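The `allowNull` helper above is a small but useful pattern: wrap any parser so that SQL `NULL` passes through untouched. A standalone sketch of mine (the money format and `parseMoney` helper are illustrative, not part of the package):

```js
// Mirror of the allowNull wrapper from textParsers.js above.
function allowNull (fn) {
  return function (value) {
    return value === null ? null : fn(value)
  }
}

// Hypothetical custom parser: strip currency punctuation, then parse.
var parseMoney = allowNull(function (v) {
  return parseFloat(v.replace(/[$,]/g, ''))
})

console.log(parseMoney('$1,234.50')) // 1234.5
console.log(parseMoney(null))        // null
```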
42
lib/node/node_modules/pg-types/package.json
generated
vendored
Normal file
@@ -0,0 +1,42 @@
{
  "name": "pg-types",
  "version": "2.2.0",
  "description": "Query result type converters for node-postgres",
  "main": "index.js",
  "scripts": {
    "test": "tape test/*.js | tap-spec && npm run test-ts",
    "test-ts": "if-node-version '>= 8' tsd"
  },
  "repository": {
    "type": "git",
    "url": "git://github.com/brianc/node-pg-types.git"
  },
  "keywords": [
    "postgres",
    "PostgreSQL",
    "pg"
  ],
  "author": "Brian M. Carlson",
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/brianc/node-pg-types/issues"
  },
  "homepage": "https://github.com/brianc/node-pg-types",
  "devDependencies": {
    "if-node-version": "^1.1.1",
    "pff": "^1.0.0",
    "tap-spec": "^4.0.0",
    "tape": "^4.0.0",
    "tsd": "^0.7.4"
  },
  "dependencies": {
    "pg-int8": "1.0.1",
    "postgres-array": "~2.0.0",
    "postgres-bytea": "~1.0.0",
    "postgres-date": "~1.0.4",
    "postgres-interval": "^1.1.0"
  },
  "engines": {
    "node": ">=4"
  }
}
24
lib/node/node_modules/pg-types/test/index.js
generated
vendored
Normal file
@@ -0,0 +1,24 @@
var test = require('tape')
var printf = require('pff')
var getTypeParser = require('../').getTypeParser
var types = require('./types')

test('types', function (t) {
  Object.keys(types).forEach(function (typeName) {
    var type = types[typeName]
    t.test(typeName, function (t) {
      var parser = getTypeParser(type.id, type.format)
      type.tests.forEach(function (tests) {
        var input = tests[0]
        var expected = tests[1]
        var result = parser(input)
        if (typeof expected === 'function') {
          return expected(t, result)
        }
        t.equal(result, expected)
      })
      t.end()
    })
  })
})
Some files were not shown because too many files have changed in this diff