Erster Docker-Stand
This commit is contained in:
146
_node_modules/@electric-sql/pglite-socket/CHANGELOG.md
generated
Normal file
146
_node_modules/@electric-sql/pglite-socket/CHANGELOG.md
generated
Normal file
@@ -0,0 +1,146 @@
|
||||
# @electric-sql/pglite-socket
|
||||
|
||||
## 0.0.20
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- 54a4873: allow extensions to be loaded via '-e/--extensions <list>' cmd line parameter'
|
||||
- 45bff97: added pgcrypto extension
|
||||
- Updated dependencies [45bff97]
|
||||
- Updated dependencies [5ec474f]
|
||||
- @electric-sql/pglite@0.3.15
|
||||
|
||||
## 0.0.19
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [8785034]
|
||||
- Updated dependencies [90cfee8]
|
||||
- @electric-sql/pglite@0.3.14
|
||||
|
||||
## 0.0.18
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [ad3d0d8]
|
||||
- @electric-sql/pglite@0.3.13
|
||||
|
||||
## 0.0.17
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [ce0e74e]
|
||||
- @electric-sql/pglite@0.3.12
|
||||
|
||||
## 0.0.16
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [9a104b9]
|
||||
- @electric-sql/pglite@0.3.11
|
||||
|
||||
## 0.0.15
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [ad765ed]
|
||||
- @electric-sql/pglite@0.3.10
|
||||
|
||||
## 0.0.14
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- e40ccad: Upgrade emsdk
|
||||
- Updated dependencies [e40ccad]
|
||||
- @electric-sql/pglite@0.3.9
|
||||
|
||||
## 0.0.13
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- bd263aa: fix oom; other fixes
|
||||
- Updated dependencies [f12a582]
|
||||
- Updated dependencies [bd263aa]
|
||||
- @electric-sql/pglite@0.3.8
|
||||
|
||||
## 0.0.12
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [0936962]
|
||||
- @electric-sql/pglite@0.3.7
|
||||
|
||||
## 0.0.11
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [6898469]
|
||||
- Updated dependencies [469be18]
|
||||
- Updated dependencies [64e33c7]
|
||||
- @electric-sql/pglite@0.3.6
|
||||
|
||||
## 0.0.10
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [6653899]
|
||||
- Updated dependencies [5f007fc]
|
||||
- @electric-sql/pglite@0.3.5
|
||||
|
||||
## 0.0.9
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- 38a55d0: fix cjs/esm misconfigurations
|
||||
- Updated dependencies [1fcaa3e]
|
||||
- Updated dependencies [38a55d0]
|
||||
- Updated dependencies [aac7003]
|
||||
- Updated dependencies [8ca254d]
|
||||
- @electric-sql/pglite@0.3.4
|
||||
|
||||
## 0.0.8
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [ea2c7c7]
|
||||
- @electric-sql/pglite@0.3.3
|
||||
|
||||
## 0.0.7
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- 5a47f4d: better handling of closing the socket
|
||||
- 6f8dd08: with the `npx pglite-server` command, add the ability to pass a command to run after the server is ready, along with passing a new DATABASE_URL environment variable to the command. This allows for a command like `npx pglite-server -r "npm run dev:inner" --include-database-url` to run a dev server that uses the pglite server as the database.
|
||||
|
||||
## 0.0.6
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [e2c654b]
|
||||
- @electric-sql/pglite@0.3.2
|
||||
|
||||
## 0.0.5
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- f975f77: Updated README
|
||||
- d9b52d5: allows unix socket connections
|
||||
|
||||
## 0.0.4
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- 027baed: missing shebang
|
||||
|
||||
## 0.0.3
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- 1c2dc84: fix pglite-socket exports
|
||||
|
||||
## 0.0.2
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [713364e]
|
||||
- @electric-sql/pglite@0.3.1
|
||||
176
_node_modules/@electric-sql/pglite-socket/LICENSE
generated
Normal file
176
_node_modules/@electric-sql/pglite-socket/LICENSE
generated
Normal file
@@ -0,0 +1,176 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
265
_node_modules/@electric-sql/pglite-socket/README.md
generated
Normal file
265
_node_modules/@electric-sql/pglite-socket/README.md
generated
Normal file
@@ -0,0 +1,265 @@
|
||||
# pglite-socket
|
||||
|
||||
A socket implementation for PGlite enabling remote connections. This package is a simple wrapper around the `net` module to allow PGlite to be used as a PostgreSQL server.
|
||||
|
||||
There are two main components to this package:
|
||||
|
||||
- [`PGLiteSocketServer`](#pglitesocketserver) - A TCP server that allows PostgreSQL clients to connect to a PGlite database instance.
|
||||
- [`PGLiteSocketHandler`](#pglitesockethandler) - A low-level handler for a single socket connection to PGlite. This class handles the raw protocol communication between a socket and PGlite, and can be used to create a custom server.
|
||||
|
||||
The package also includes a [CLI](#cli-usage) for quickly starting a PGlite socket server.
|
||||
|
||||
Note: As PGlite is a single-connection database, it is not possible to have multiple simultaneous connections open. This means that the socket server will only support a single client connection at a time. While a `PGLiteSocketServer` or `PGLiteSocketHandler` are attached to a PGlite instance they hold an exclusive lock preventing any other connections, or queries on the PGlite instance.
|
||||
|
||||
## Installation
|
||||
|
||||
```bash
|
||||
npm install @electric-sql/pglite-socket
|
||||
# or
|
||||
yarn add @electric-sql/pglite-socket
|
||||
# or
|
||||
pnpm add @electric-sql/pglite-socket
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```typescript
|
||||
import { PGlite } from '@electric-sql/pglite'
|
||||
import { PGLiteSocketServer } from '@electric-sql/pglite-socket'
|
||||
|
||||
// Create a PGlite instance
|
||||
const db = await PGlite.create()
|
||||
|
||||
// Create and start a socket server
|
||||
const server = new PGLiteSocketServer({
|
||||
db,
|
||||
port: 5432,
|
||||
host: '127.0.0.1',
|
||||
})
|
||||
|
||||
await server.start()
|
||||
console.log('Server started on 127.0.0.1:5432')
|
||||
|
||||
// Handle graceful shutdown
|
||||
process.on('SIGINT', async () => {
|
||||
await server.stop()
|
||||
await db.close()
|
||||
console.log('Server stopped and database closed')
|
||||
process.exit(0)
|
||||
})
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
### PGLiteSocketServer
|
||||
|
||||
Creates a TCP server that allows PostgreSQL clients to connect to a PGlite database instance.
|
||||
|
||||
#### Options
|
||||
|
||||
- `db: PGlite` - The PGlite database instance
|
||||
- `port?: number` - The port to listen on (default: 5432). Use port 0 to let the OS assign an available port
|
||||
- `host?: string` - The host to bind to (default: 127.0.0.1)
|
||||
- `path?: string` - Unix socket path to bind to (takes precedence over host:port)
|
||||
- `inspect?: boolean` - Print the incoming and outgoing data to the console (default: false)
|
||||
|
||||
#### Methods
|
||||
|
||||
- `start(): Promise<void>` - Start the socket server
|
||||
- `stop(): Promise<void>` - Stop the socket server
|
||||
|
||||
#### Events
|
||||
|
||||
- `listening` - Emitted when the server starts listening
|
||||
- `connection` - Emitted when a client connects
|
||||
- `error` - Emitted when an error occurs
|
||||
- `close` - Emitted when the server is closed
|
||||
|
||||
### PGLiteSocketHandler
|
||||
|
||||
Low-level handler for a single socket connection to PGlite. This class handles the raw protocol communication between a socket and PGlite.
|
||||
|
||||
#### Options
|
||||
|
||||
- `db: PGlite` - The PGlite database instance
|
||||
- `closeOnDetach?: boolean` - Whether to close the socket when detached (default: false)
|
||||
- `inspect?: boolean` - Print the incoming and outgoing data to the console in hex and ascii (default: false)
|
||||
|
||||
#### Methods
|
||||
|
||||
- `attach(socket: Socket): Promise<PGLiteSocketHandler>` - Attach a socket to this handler
|
||||
- `detach(close?: boolean): PGLiteSocketHandler` - Detach the current socket from this handler
|
||||
- `isAttached: boolean` - Check if a socket is currently attached
|
||||
|
||||
#### Events
|
||||
|
||||
- `data` - Emitted when data is processed through the handler
|
||||
- `error` - Emitted when an error occurs
|
||||
- `close` - Emitted when the socket is closed
|
||||
|
||||
#### Example
|
||||
|
||||
```typescript
|
||||
import { PGlite } from '@electric-sql/pglite'
|
||||
import { PGLiteSocketHandler } from '@electric-sql/pglite-socket'
|
||||
import { createServer, Socket } from 'net'
|
||||
|
||||
// Create a PGlite instance
|
||||
const db = await PGlite.create()
|
||||
|
||||
// Create a handler
|
||||
const handler = new PGLiteSocketHandler({
|
||||
db,
|
||||
closeOnDetach: true,
|
||||
inspect: false,
|
||||
})
|
||||
|
||||
// Create a server that uses the handler
|
||||
const server = createServer(async (socket: Socket) => {
|
||||
try {
|
||||
await handler.attach(socket)
|
||||
console.log('Client connected')
|
||||
} catch (err) {
|
||||
console.error('Error attaching socket', err)
|
||||
socket.end()
|
||||
}
|
||||
})
|
||||
|
||||
server.listen(5432, '127.0.0.1')
|
||||
```
|
||||
|
||||
## Examples
|
||||
|
||||
See the [examples directory](./examples) for more usage examples.
|
||||
|
||||
## CLI Usage
|
||||
|
||||
This package provides a command-line interface for quickly starting a PGlite socket server.
|
||||
|
||||
```bash
|
||||
# Install globally
|
||||
npm install -g @electric-sql/pglite-socket
|
||||
|
||||
# Start a server with default settings (in-memory database, port 5432)
|
||||
pglite-server
|
||||
|
||||
# Start a server with custom options
|
||||
pglite-server --db=/path/to/database --port=5433 --host=0.0.0.0 --debug=1
|
||||
|
||||
# Using short options
|
||||
pglite-server -d /path/to/database -p 5433 -h 0.0.0.0 -v 1
|
||||
|
||||
# Show help
|
||||
pglite-server --help
|
||||
```
|
||||
|
||||
### CLI Options
|
||||
|
||||
- `-d, --db=PATH` - Database path (default: memory://)
|
||||
- `-p, --port=PORT` - Port to listen on (default: 5432). Use 0 to let the OS assign an available port
|
||||
- `-h, --host=HOST` - Host to bind to (default: 127.0.0.1)
|
||||
- `-u, --path=UNIX` - Unix socket to bind to (takes precedence over host:port)
|
||||
- `-v, --debug=LEVEL` - Debug level 0-5 (default: 0)
|
||||
- `-e, --extensions=LIST` - Comma-separated list of extensions to load (e.g., vector,pgcrypto)
|
||||
- `-r, --run=COMMAND` - Command to run after server starts
|
||||
- `--include-database-url` - Include DATABASE_URL in subprocess environment
|
||||
- `--shutdown-timeout=MS` - Timeout for graceful subprocess shutdown in ms (default: 5000)
|
||||
|
||||
### Development Server Integration
|
||||
|
||||
The `--run` option is particularly useful for development workflows where you want to use PGlite as a drop-in replacement for PostgreSQL. This allows you to wrap your development server and automatically provide it with a DATABASE_URL pointing to your PGlite instance.
|
||||
|
||||
```bash
|
||||
# Start your Next.js dev server with PGlite
|
||||
pglite-server --run "npm run dev" --include-database-url
|
||||
|
||||
# Start a Node.js app with PGlite
|
||||
pglite-server --db=./dev-db --run "node server.js" --include-database-url
|
||||
|
||||
# Start multiple services (using a process manager like concurrently)
|
||||
pglite-server --run "npx concurrently 'npm run dev' 'npm run worker'" --include-database-url
|
||||
```
|
||||
|
||||
When using `--run` with `--include-database-url`, the subprocess will receive a `DATABASE_URL` environment variable with the correct connection string for your PGlite server. This enables seamless integration with applications that expect a PostgreSQL connection string.
|
||||
|
||||
### Using in npm scripts
|
||||
|
||||
You can add the CLI to your package.json scripts for convenient execution:
|
||||
|
||||
```json
|
||||
{
|
||||
"scripts": {
|
||||
"db:start": "pglite-server --db=./data/mydb --port=5433",
|
||||
"db:dev": "pglite-server --db=memory:// --debug=1",
|
||||
"dev": "pglite-server --db=./dev-db --run 'npm run start:dev' --include-database-url",
|
||||
"dev:clean": "pglite-server --run 'npm run start:dev' --include-database-url"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Then run with:
|
||||
|
||||
```bash
|
||||
npm run dev # Start with persistent database
|
||||
npm run dev:clean # Start with in-memory database
|
||||
```
|
||||
|
||||
### Unix Socket Support
|
||||
|
||||
For better performance in local development, you can use Unix sockets instead of TCP:
|
||||
|
||||
```bash
|
||||
# Start server on a Unix socket
|
||||
pglite-server --path=/tmp/pglite.sock --run "npm run dev" --include-database-url
|
||||
|
||||
# The DATABASE_URL will be: postgresql://postgres:postgres@/postgres?host=/tmp
|
||||
```
|
||||
|
||||
### Connecting to the server
|
||||
|
||||
Once the server is running, you can connect to it using any PostgreSQL client:
|
||||
|
||||
#### Using psql
|
||||
|
||||
```bash
|
||||
PGSSLMODE=disable psql -h localhost -p 5432 -d template1
|
||||
```
|
||||
|
||||
#### Using Node.js clients
|
||||
|
||||
```javascript
|
||||
// Using node-postgres
|
||||
import pg from 'pg'
|
||||
const client = new pg.Client({
|
||||
host: 'localhost',
|
||||
port: 5432,
|
||||
database: 'template1'
|
||||
})
|
||||
await client.connect()
|
||||
|
||||
// Using postgres.js
|
||||
import postgres from 'postgres'
|
||||
const sql = postgres({
|
||||
host: 'localhost',
|
||||
port: 5432,
|
||||
database: 'template1'
|
||||
})
|
||||
|
||||
// Using environment variable (when using --include-database-url)
|
||||
const sql = postgres(process.env.DATABASE_URL)
|
||||
```
|
||||
|
||||
### Limitations and Tips
|
||||
|
||||
- Remember that PGlite only supports one connection at a time. If you're unable to connect, make sure no other client is currently connected.
|
||||
- For development purposes, using an in-memory database (`--db=memory://`) is fastest but data won't persist after the server is stopped.
|
||||
- For persistent storage, specify a file path for the database (e.g., `--db=./data/mydb`).
|
||||
- When using debug mode (`--debug=1` or higher), additional protocol information will be displayed in the console.
|
||||
- To allow connections from other machines, set the host to `0.0.0.0` with `--host=0.0.0.0`.
|
||||
- SSL connections are **NOT** supported. For `psql`, set env var `PGSSLMODE=disable`.
|
||||
- When using `--run`, the server will automatically shut down if the subprocess exits with a non-zero code.
|
||||
- Use `--shutdown-timeout` to adjust how long to wait for graceful subprocess termination (default: 5 seconds).
|
||||
|
||||
## License
|
||||
|
||||
Apache 2.0
|
||||
2
_node_modules/@electric-sql/pglite-socket/dist/chunk-F6KLIXM7.js
generated
vendored
Normal file
2
_node_modules/@electric-sql/pglite-socket/dist/chunk-F6KLIXM7.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
_node_modules/@electric-sql/pglite-socket/dist/chunk-F6KLIXM7.js.map
generated
vendored
Normal file
1
_node_modules/@electric-sql/pglite-socket/dist/chunk-F6KLIXM7.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
2
_node_modules/@electric-sql/pglite-socket/dist/index.cjs
generated
vendored
Normal file
2
_node_modules/@electric-sql/pglite-socket/dist/index.cjs
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
_node_modules/@electric-sql/pglite-socket/dist/index.cjs.map
generated
vendored
Normal file
1
_node_modules/@electric-sql/pglite-socket/dist/index.cjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
160
_node_modules/@electric-sql/pglite-socket/dist/index.d.cts
generated
vendored
Normal file
160
_node_modules/@electric-sql/pglite-socket/dist/index.d.cts
generated
vendored
Normal file
@@ -0,0 +1,160 @@
|
||||
import { PGlite } from '@electric-sql/pglite';
|
||||
import { Socket } from 'net';
|
||||
|
||||
declare const CONNECTION_QUEUE_TIMEOUT = 60000;
|
||||
/**
|
||||
* Options for creating a PGLiteSocketHandler
|
||||
*/
|
||||
interface PGLiteSocketHandlerOptions {
|
||||
/** The PGlite database instance */
|
||||
db: PGlite;
|
||||
/** Whether to close the socket when detached (default: false) */
|
||||
closeOnDetach?: boolean;
|
||||
/** Print the incoming and outgoing data to the console in hex and ascii */
|
||||
inspect?: boolean;
|
||||
/** Enable debug logging of method calls */
|
||||
debug?: boolean;
|
||||
}
|
||||
/**
|
||||
* Low-level handler for a single socket connection to PGLite
|
||||
* Handles the raw protocol communication between a socket and PGLite
|
||||
*/
|
||||
declare class PGLiteSocketHandler extends EventTarget {
|
||||
readonly db: PGlite;
|
||||
private socket;
|
||||
private active;
|
||||
private closeOnDetach;
|
||||
private resolveLock?;
|
||||
private rejectLock?;
|
||||
private inspect;
|
||||
private debug;
|
||||
private readonly id;
|
||||
private static nextHandlerId;
|
||||
/**
|
||||
* Create a new PGLiteSocketHandler
|
||||
* @param options Options for the handler
|
||||
*/
|
||||
constructor(options: PGLiteSocketHandlerOptions);
|
||||
/**
|
||||
* Get the unique ID of this handler
|
||||
*/
|
||||
get handlerId(): number;
|
||||
/**
|
||||
* Log a message if debug is enabled
|
||||
* @private
|
||||
*/
|
||||
private log;
|
||||
/**
|
||||
* Attach a socket to this handler
|
||||
* @param socket The socket to attach
|
||||
* @returns this handler instance
|
||||
* @throws Error if a socket is already attached
|
||||
*/
|
||||
attach(socket: Socket): Promise<PGLiteSocketHandler>;
|
||||
/**
|
||||
* Detach the current socket from this handler
|
||||
* @param close Whether to close the socket when detaching (overrides constructor option)
|
||||
* @returns this handler instance
|
||||
*/
|
||||
detach(close?: boolean): PGLiteSocketHandler;
|
||||
/**
|
||||
* Check if a socket is currently attached
|
||||
*/
|
||||
get isAttached(): boolean;
|
||||
/**
|
||||
* Handle incoming data from the socket
|
||||
*/
|
||||
private handleData;
|
||||
/**
|
||||
* Handle errors from the socket
|
||||
*/
|
||||
private handleError;
|
||||
/**
|
||||
* Handle socket close event
|
||||
*/
|
||||
private handleClose;
|
||||
/**
|
||||
* Print data in hex and ascii to the console
|
||||
*/
|
||||
private inspectData;
|
||||
}
|
||||
/**
|
||||
* Options for creating a PGLiteSocketServer
|
||||
*/
|
||||
interface PGLiteSocketServerOptions {
|
||||
/** The PGlite database instance */
|
||||
db: PGlite;
|
||||
/** The port to listen on (default: 5432) */
|
||||
port?: number;
|
||||
/** The host to bind to (default: 127.0.0.1) */
|
||||
host?: string;
|
||||
/** Unix socket path to bind to (default: undefined). If specified, takes precedence over host:port */
|
||||
path?: string;
|
||||
/** Print the incoming and outgoing data to the console in hex and ascii */
|
||||
inspect?: boolean;
|
||||
/** Connection queue timeout in milliseconds (default: 10000) */
|
||||
connectionQueueTimeout?: number;
|
||||
/** Enable debug logging of method calls */
|
||||
debug?: boolean;
|
||||
}
|
||||
/**
|
||||
* High-level server that manages socket connections to PGLite
|
||||
* Creates and manages a TCP server and handles client connections
|
||||
*/
|
||||
declare class PGLiteSocketServer extends EventTarget {
|
||||
readonly db: PGlite;
|
||||
private server;
|
||||
private port?;
|
||||
private host?;
|
||||
private path?;
|
||||
private active;
|
||||
private inspect;
|
||||
private debug;
|
||||
private connectionQueueTimeout;
|
||||
private activeHandler;
|
||||
private connectionQueue;
|
||||
private handlerCount;
|
||||
/**
|
||||
* Create a new PGLiteSocketServer
|
||||
* @param options Options for the server
|
||||
*/
|
||||
constructor(options: PGLiteSocketServerOptions);
|
||||
/**
|
||||
* Log a message if debug is enabled
|
||||
* @private
|
||||
*/
|
||||
private log;
|
||||
/**
|
||||
* Start the socket server
|
||||
* @returns Promise that resolves when the server is listening
|
||||
*/
|
||||
start(): Promise<void>;
|
||||
getServerConn(): string;
|
||||
/**
|
||||
* Stop the socket server
|
||||
* @returns Promise that resolves when the server is closed
|
||||
*/
|
||||
stop(): Promise<void>;
|
||||
/**
|
||||
* Get the active handler ID, or null if no active handler
|
||||
*/
|
||||
private get activeHandlerId();
|
||||
/**
|
||||
* Handle a new client connection
|
||||
*/
|
||||
private handleConnection;
|
||||
/**
|
||||
* Add a connection to the queue
|
||||
*/
|
||||
private enqueueConnection;
|
||||
/**
|
||||
* Process the next connection in the queue
|
||||
*/
|
||||
private processNextInQueue;
|
||||
/**
|
||||
* Attach a socket to a new handler
|
||||
*/
|
||||
private attachSocketToNewHandler;
|
||||
}
|
||||
|
||||
export { CONNECTION_QUEUE_TIMEOUT, PGLiteSocketHandler, type PGLiteSocketHandlerOptions, PGLiteSocketServer, type PGLiteSocketServerOptions };
|
||||
160
_node_modules/@electric-sql/pglite-socket/dist/index.d.ts
generated
vendored
Normal file
160
_node_modules/@electric-sql/pglite-socket/dist/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,160 @@
|
||||
import { PGlite } from '@electric-sql/pglite';
|
||||
import { Socket } from 'net';
|
||||
|
||||
declare const CONNECTION_QUEUE_TIMEOUT = 60000;
|
||||
/**
|
||||
* Options for creating a PGLiteSocketHandler
|
||||
*/
|
||||
interface PGLiteSocketHandlerOptions {
|
||||
/** The PGlite database instance */
|
||||
db: PGlite;
|
||||
/** Whether to close the socket when detached (default: false) */
|
||||
closeOnDetach?: boolean;
|
||||
/** Print the incoming and outgoing data to the console in hex and ascii */
|
||||
inspect?: boolean;
|
||||
/** Enable debug logging of method calls */
|
||||
debug?: boolean;
|
||||
}
|
||||
/**
|
||||
* Low-level handler for a single socket connection to PGLite
|
||||
* Handles the raw protocol communication between a socket and PGLite
|
||||
*/
|
||||
declare class PGLiteSocketHandler extends EventTarget {
|
||||
readonly db: PGlite;
|
||||
private socket;
|
||||
private active;
|
||||
private closeOnDetach;
|
||||
private resolveLock?;
|
||||
private rejectLock?;
|
||||
private inspect;
|
||||
private debug;
|
||||
private readonly id;
|
||||
private static nextHandlerId;
|
||||
/**
|
||||
* Create a new PGLiteSocketHandler
|
||||
* @param options Options for the handler
|
||||
*/
|
||||
constructor(options: PGLiteSocketHandlerOptions);
|
||||
/**
|
||||
* Get the unique ID of this handler
|
||||
*/
|
||||
get handlerId(): number;
|
||||
/**
|
||||
* Log a message if debug is enabled
|
||||
* @private
|
||||
*/
|
||||
private log;
|
||||
/**
|
||||
* Attach a socket to this handler
|
||||
* @param socket The socket to attach
|
||||
* @returns this handler instance
|
||||
* @throws Error if a socket is already attached
|
||||
*/
|
||||
attach(socket: Socket): Promise<PGLiteSocketHandler>;
|
||||
/**
|
||||
* Detach the current socket from this handler
|
||||
* @param close Whether to close the socket when detaching (overrides constructor option)
|
||||
* @returns this handler instance
|
||||
*/
|
||||
detach(close?: boolean): PGLiteSocketHandler;
|
||||
/**
|
||||
* Check if a socket is currently attached
|
||||
*/
|
||||
get isAttached(): boolean;
|
||||
/**
|
||||
* Handle incoming data from the socket
|
||||
*/
|
||||
private handleData;
|
||||
/**
|
||||
* Handle errors from the socket
|
||||
*/
|
||||
private handleError;
|
||||
/**
|
||||
* Handle socket close event
|
||||
*/
|
||||
private handleClose;
|
||||
/**
|
||||
* Print data in hex and ascii to the console
|
||||
*/
|
||||
private inspectData;
|
||||
}
|
||||
/**
|
||||
* Options for creating a PGLiteSocketServer
|
||||
*/
|
||||
interface PGLiteSocketServerOptions {
|
||||
/** The PGlite database instance */
|
||||
db: PGlite;
|
||||
/** The port to listen on (default: 5432) */
|
||||
port?: number;
|
||||
/** The host to bind to (default: 127.0.0.1) */
|
||||
host?: string;
|
||||
/** Unix socket path to bind to (default: undefined). If specified, takes precedence over host:port */
|
||||
path?: string;
|
||||
/** Print the incoming and outgoing data to the console in hex and ascii */
|
||||
inspect?: boolean;
|
||||
/** Connection queue timeout in milliseconds (default: 10000) */
|
||||
connectionQueueTimeout?: number;
|
||||
/** Enable debug logging of method calls */
|
||||
debug?: boolean;
|
||||
}
|
||||
/**
|
||||
* High-level server that manages socket connections to PGLite
|
||||
* Creates and manages a TCP server and handles client connections
|
||||
*/
|
||||
declare class PGLiteSocketServer extends EventTarget {
|
||||
readonly db: PGlite;
|
||||
private server;
|
||||
private port?;
|
||||
private host?;
|
||||
private path?;
|
||||
private active;
|
||||
private inspect;
|
||||
private debug;
|
||||
private connectionQueueTimeout;
|
||||
private activeHandler;
|
||||
private connectionQueue;
|
||||
private handlerCount;
|
||||
/**
|
||||
* Create a new PGLiteSocketServer
|
||||
* @param options Options for the server
|
||||
*/
|
||||
constructor(options: PGLiteSocketServerOptions);
|
||||
/**
|
||||
* Log a message if debug is enabled
|
||||
* @private
|
||||
*/
|
||||
private log;
|
||||
/**
|
||||
* Start the socket server
|
||||
* @returns Promise that resolves when the server is listening
|
||||
*/
|
||||
start(): Promise<void>;
|
||||
getServerConn(): string;
|
||||
/**
|
||||
* Stop the socket server
|
||||
* @returns Promise that resolves when the server is closed
|
||||
*/
|
||||
stop(): Promise<void>;
|
||||
/**
|
||||
* Get the active handler ID, or null if no active handler
|
||||
*/
|
||||
private get activeHandlerId();
|
||||
/**
|
||||
* Handle a new client connection
|
||||
*/
|
||||
private handleConnection;
|
||||
/**
|
||||
* Add a connection to the queue
|
||||
*/
|
||||
private enqueueConnection;
|
||||
/**
|
||||
* Process the next connection in the queue
|
||||
*/
|
||||
private processNextInQueue;
|
||||
/**
|
||||
* Attach a socket to a new handler
|
||||
*/
|
||||
private attachSocketToNewHandler;
|
||||
}
|
||||
|
||||
export { CONNECTION_QUEUE_TIMEOUT, PGLiteSocketHandler, type PGLiteSocketHandlerOptions, PGLiteSocketServer, type PGLiteSocketServerOptions };
|
||||
2
_node_modules/@electric-sql/pglite-socket/dist/index.js
generated
vendored
Normal file
2
_node_modules/@electric-sql/pglite-socket/dist/index.js
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
import{a,b,c}from"./chunk-F6KLIXM7.js";export{a as CONNECTION_QUEUE_TIMEOUT,b as PGLiteSocketHandler,c as PGLiteSocketServer};
|
||||
//# sourceMappingURL=index.js.map
|
||||
1
_node_modules/@electric-sql/pglite-socket/dist/index.js.map
generated
vendored
Normal file
1
_node_modules/@electric-sql/pglite-socket/dist/index.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":[],"sourcesContent":[],"mappings":"","names":[]}
|
||||
19
_node_modules/@electric-sql/pglite-socket/dist/scripts/server.cjs
generated
vendored
Normal file
19
_node_modules/@electric-sql/pglite-socket/dist/scripts/server.cjs
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
_node_modules/@electric-sql/pglite-socket/dist/scripts/server.cjs.map
generated
vendored
Normal file
1
_node_modules/@electric-sql/pglite-socket/dist/scripts/server.cjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
_node_modules/@electric-sql/pglite-socket/dist/scripts/server.d.cts
generated
vendored
Normal file
1
_node_modules/@electric-sql/pglite-socket/dist/scripts/server.d.cts
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
#!/usr/bin/env node
|
||||
1
_node_modules/@electric-sql/pglite-socket/dist/scripts/server.d.ts
generated
vendored
Normal file
1
_node_modules/@electric-sql/pglite-socket/dist/scripts/server.d.ts
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
#!/usr/bin/env node
|
||||
19
_node_modules/@electric-sql/pglite-socket/dist/scripts/server.js
generated
vendored
Normal file
19
_node_modules/@electric-sql/pglite-socket/dist/scripts/server.js
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
#!/usr/bin/env node
|
||||
import{c as d}from"../chunk-F6KLIXM7.js";import{PGlite as h}from"@electric-sql/pglite";import{parseArgs as u}from"node:util";import{spawn as p}from"node:child_process";var r=u({options:{db:{type:"string",short:"d",default:"memory://",help:"Database path (relative or absolute). Use memory:// for in-memory database."},port:{type:"string",short:"p",default:"5432",help:"Port to listen on"},host:{type:"string",short:"h",default:"127.0.0.1",help:"Host to bind to"},path:{type:"string",short:"u",default:void 0,help:"unix socket to bind to. Takes precedence over host:port"},debug:{type:"string",short:"v",default:"0",help:"Debug level (0-5)"},extensions:{type:"string",short:"e",default:void 0,help:"Comma-separated list of extensions to load (e.g., vector,pgcrypto)"},run:{type:"string",short:"r",default:void 0,help:"Command to run after server starts"},"include-database-url":{type:"boolean",default:!1,help:"Include DATABASE_URL in the environment of the subprocess"},"shutdown-timeout":{type:"string",default:"5000",help:"Timeout in milliseconds for graceful subprocess shutdown (default: 5000)"},help:{type:"boolean",short:"?",default:!1,help:"Show help"}}}),g=`PGlite Socket Server
|
||||
Usage: pglite-server [options]
|
||||
|
||||
Options:
|
||||
-d, --db=PATH Database path (default: memory://)
|
||||
-p, --port=PORT Port to listen on (default: 5432)
|
||||
-h, --host=HOST Host to bind to (default: 127.0.0.1)
|
||||
-u, --path=UNIX Unix socket to bind to (default: undefined). Takes precedence over host:port
|
||||
-v, --debug=LEVEL Debug level 0-5 (default: 0)
|
||||
-e, --extensions=LIST Comma-separated list of extensions to load
|
||||
Formats: vector, pgcrypto (built-in/contrib)
|
||||
@org/package/path:exportedName (npm package)
|
||||
-r, --run=COMMAND Command to run after server starts
|
||||
--include-database-url Include DATABASE_URL in subprocess environment
|
||||
--shutdown-timeout=MS Timeout for graceful subprocess shutdown in ms (default: 5000)
|
||||
`,l=class{constructor(e){this.db=null;this.server=null;this.subprocessManager=null;this.config=e}static parseConfig(){let e=r.values.extensions;return{dbPath:r.values.db,port:parseInt(r.values.port,10),host:r.values.host,path:r.values.path,debugLevel:parseInt(r.values.debug,10),extensionNames:e?e.split(",").map(o=>o.trim()):void 0,runCommand:r.values.run,includeDatabaseUrl:r.values["include-database-url"],shutdownTimeout:parseInt(r.values["shutdown-timeout"],10)}}createDatabaseUrl(){let{host:e,port:o,path:t}=this.config;if(t){let s=t.endsWith("/.s.PGSQL.5432")?t.slice(0,-13):t;return`postgresql://postgres:postgres@/postgres?host=${encodeURIComponent(s)}`}else return`postgresql://postgres:postgres@${e}:${o}/postgres`}async importExtensions(){if(!this.config.extensionNames?.length)return;let e={},o=["vector","live","pg_hashids","pg_ivm","pg_uuidv7","pgtap"];for(let t of this.config.extensionNames){let s=null;try{if(t.includes(":")){let[i,n]=t.split(":");if(!i||!n)throw new Error(`Invalid extension format '${t}'. 
Expected: package/path:exportedName`);s=(await import(i))[n],s&&(e[n]=s,console.log(`Imported extension '${n}' from '${i}'`))}else if(o.includes(t))s=(await import(`@electric-sql/pglite/${t}`))[t],s&&(e[t]=s,console.log(`Imported extension: ${t}`));else{try{s=(await import(`@electric-sql/pglite/contrib/${t}`))[t]}catch{s=(await import(`@electric-sql/pglite-${t}`))[t]}s&&(e[t]=s,console.log(`Imported extension: ${t}`))}}catch(i){throw console.error(`Failed to import extension '${t}':`,i),new Error(`Failed to import extension '${t}'`)}}return Object.keys(e).length>0?e:void 0}async initializeDatabase(){console.log(`Initializing PGLite with database: ${this.config.dbPath}`),console.log(`Debug level: ${this.config.debugLevel}`);let e=await this.importExtensions();this.db=new h(this.config.dbPath,{debug:this.config.debugLevel,extensions:e}),await this.db.waitReady,console.log("PGlite database initialized")}setupServerEventHandlers(){if(!this.server||!this.subprocessManager)throw new Error("Server or subprocess manager not initialized");this.server.addEventListener("listening",e=>{let o=e.detail;if(console.log(`PGLiteSocketServer listening on ${JSON.stringify(o)}`),this.config.runCommand&&this.subprocessManager){let t=this.createDatabaseUrl();this.subprocessManager.spawn(this.config.runCommand,t,this.config.includeDatabaseUrl)}}),this.server.addEventListener("connection",e=>{let{clientAddress:o,clientPort:t}=e.detail;console.log(`Client connected from ${o}:${t}`)}),this.server.addEventListener("error",e=>{let o=e.detail;console.error("Socket server error:",o)})}setupSignalHandlers(){process.on("SIGINT",()=>this.shutdown()),process.on("SIGTERM",()=>this.shutdown())}async start(){try{if(await this.initializeDatabase(),!this.db)throw new Error("Database initialization failed");this.server=new d({db:this.db,port:this.config.port,host:this.config.host,path:this.config.path,inspect:this.config.debugLevel>0}),this.subprocessManager=new 
c(e=>{this.shutdown(e)}),this.setupServerEventHandlers(),this.setupSignalHandlers(),await this.server.start()}catch(e){throw console.error("Failed to start PGLiteSocketServer:",e),e}}async shutdown(e=0){console.log(`
|
||||
Shutting down PGLiteSocketServer...`),this.subprocessManager&&this.subprocessManager.terminate(this.config.shutdownTimeout),this.server&&await this.server.stop(),this.db&&await this.db.close(),console.log("Server stopped"),process.exit(e)}},c=class{constructor(e){this.childProcess=null;this.onExit=e}get process(){return this.childProcess}spawn(e,o,t){console.log(`Running command: ${e}`);let s={...process.env};t&&(s.DATABASE_URL=o,console.log(`Setting DATABASE_URL=${o}`));let i=e.trim().split(/\s+/);this.childProcess=p(i[0],i.slice(1),{env:s,stdio:"inherit"}),this.childProcess.on("error",n=>{console.error("Error running command:",n),console.log("Subprocess failed to start, shutting down..."),this.onExit(1)}),this.childProcess.on("close",n=>{console.log(`Command exited with code ${n}`),this.childProcess=null,n!==null&&n!==0&&(console.log(`Child process failed with exit code ${n}, shutting down...`),this.onExit(n))})}terminate(e){this.childProcess&&(console.log("Terminating child process..."),this.childProcess.kill("SIGTERM"),setTimeout(()=>{this.childProcess&&!this.childProcess.killed&&(console.log("Force killing child process..."),this.childProcess.kill("SIGKILL"))},e))}};async function m(){r.values.help&&(console.log(g),process.exit(0));try{let a=l.parseConfig();await new l(a).start()}catch(a){console.error("Unhandled error:",a),process.exit(1)}}m();
|
||||
//# sourceMappingURL=server.js.map
|
||||
1
_node_modules/@electric-sql/pglite-socket/dist/scripts/server.js.map
generated
vendored
Normal file
1
_node_modules/@electric-sql/pglite-socket/dist/scripts/server.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
29
_node_modules/@electric-sql/pglite-socket/eslint.config.js
generated
Normal file
29
_node_modules/@electric-sql/pglite-socket/eslint.config.js
generated
Normal file
@@ -0,0 +1,29 @@
|
||||
import globals from 'globals'
|
||||
import rootConfig from '../../eslint.config.js'
|
||||
|
||||
export default [
|
||||
...rootConfig,
|
||||
{
|
||||
ignores: ['release/**/*', 'examples/**/*', 'dist/**/*'],
|
||||
},
|
||||
{
|
||||
languageOptions: {
|
||||
globals: {
|
||||
...globals.browser,
|
||||
...globals.node,
|
||||
},
|
||||
},
|
||||
rules: {
|
||||
...rootConfig.rules,
|
||||
'@typescript-eslint/no-explicit-any': 'off',
|
||||
},
|
||||
},
|
||||
{
|
||||
files: ['tests/targets/deno/**/*.js'],
|
||||
languageOptions: {
|
||||
globals: {
|
||||
Deno: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
]
|
||||
76
_node_modules/@electric-sql/pglite-socket/examples/basic-server.ts
generated
Normal file
76
_node_modules/@electric-sql/pglite-socket/examples/basic-server.ts
generated
Normal file
@@ -0,0 +1,76 @@
|
||||
import { PGLiteSocketServer } from '../src'
|
||||
import { PGlite, DebugLevel } from '@electric-sql/pglite'
|
||||
|
||||
/*
|
||||
* This is a basic example of how to use the PGLiteSocketServer class.
|
||||
* It creates a PGlite instance and a PGLiteSocketServer instance and starts the server.
|
||||
* It also handles SIGINT to stop the server and close the database.
|
||||
* You can run this example with the following command:
|
||||
*
|
||||
* ```bash
|
||||
* pnpm tsx examples/basic-server.ts
|
||||
* ```
|
||||
* or with the handy script:
|
||||
* ```bash
|
||||
* pnpm example:basic-server
|
||||
* ```
|
||||
*
|
||||
* You can set the host and port with the following environment variables:
|
||||
*
|
||||
* ```bash
|
||||
* HOST=127.0.0.1 PORT=5432 DEBUG=1 pnpm tsx examples/basic-server.ts
|
||||
* ```
|
||||
*
|
||||
* Debug level can be set to 0, 1, 2, 3, or 4.
|
||||
*
|
||||
* ```bash
|
||||
* DEBUG=1 pnpm tsx examples/basic-server.ts
|
||||
* ```
|
||||
* You can also use a UNIX socket instead of the host:port
|
||||
*
|
||||
* ```bash
|
||||
* UNIX=/tmp/.s.PGSQL.5432 DEBUG=1 pnpm tsx examples/basic-server.ts
|
||||
* ```
|
||||
*/
|
||||
|
||||
const UNIX = process.env.UNIX
|
||||
const PORT = process.env.PORT ? parseInt(process.env.PORT) : 5432
|
||||
const HOST = process.env.HOST ?? '127.0.0.1'
|
||||
const DEBUG = process.env.DEBUG
|
||||
? (parseInt(process.env.DEBUG) as DebugLevel)
|
||||
: 0
|
||||
|
||||
// Create a PGlite instance
|
||||
const db = await PGlite.create({
|
||||
debug: DEBUG,
|
||||
})
|
||||
|
||||
// Check if the database is working
|
||||
console.log(await db.query('SELECT version()'))
|
||||
|
||||
// Create a PGLiteSocketServer instance
|
||||
const server = new PGLiteSocketServer({
|
||||
db,
|
||||
port: PORT,
|
||||
host: HOST,
|
||||
path: UNIX,
|
||||
inspect: !!DEBUG, // Print the incoming and outgoing data to the console
|
||||
})
|
||||
|
||||
server.addEventListener('listening', (event) => {
|
||||
const detail = (
|
||||
event as CustomEvent<{ port: number; host: string } | { host: string }>
|
||||
).detail
|
||||
console.log(`Server listening on ${JSON.stringify(detail)}`)
|
||||
})
|
||||
|
||||
// Start the server
|
||||
await server.start()
|
||||
|
||||
// Handle SIGINT to stop the server and close the database
|
||||
process.on('SIGINT', async () => {
|
||||
await server.stop()
|
||||
await db.close()
|
||||
console.log('Server stopped and database closed')
|
||||
process.exit(0)
|
||||
})
|
||||
69
_node_modules/@electric-sql/pglite-socket/package.json
generated
Normal file
69
_node_modules/@electric-sql/pglite-socket/package.json
generated
Normal file
@@ -0,0 +1,69 @@
|
||||
{
|
||||
"name": "@electric-sql/pglite-socket",
|
||||
"version": "0.0.20",
|
||||
"description": "A socket implementation for PGlite enabling remote connections",
|
||||
"author": "Electric DB Limited",
|
||||
"homepage": "https://pglite.dev",
|
||||
"license": "Apache-2.0",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/electric-sql/pglite",
|
||||
"directory": "packages/pglite-socket"
|
||||
},
|
||||
"keywords": [
|
||||
"postgres",
|
||||
"sql",
|
||||
"database",
|
||||
"wasm",
|
||||
"pglite",
|
||||
"socket"
|
||||
],
|
||||
"private": false,
|
||||
"publishConfig": {
|
||||
"access": "public"
|
||||
},
|
||||
"type": "module",
|
||||
"main": "dist/index.cjs",
|
||||
"module": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
"exports": {
|
||||
".": {
|
||||
"import": {
|
||||
"types": "./dist/index.d.ts",
|
||||
"default": "./dist/index.js"
|
||||
},
|
||||
"require": {
|
||||
"types": "./dist/index.d.cts",
|
||||
"default": "./dist/index.cjs"
|
||||
}
|
||||
}
|
||||
},
|
||||
"bin": {
|
||||
"pglite-server": "./dist/scripts/server.js"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@arethetypeswrong/cli": "^0.18.1",
|
||||
"@types/emscripten": "^1.41.1",
|
||||
"@types/node": "^20.16.11",
|
||||
"pg": "^8.14.0",
|
||||
"postgres": "^3.4.5",
|
||||
"tsx": "^4.19.2",
|
||||
"vitest": "^1.3.1",
|
||||
"@electric-sql/pg-protocol": "0.0.4",
|
||||
"@electric-sql/pglite": "0.3.15"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@electric-sql/pglite": "0.3.15"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "tsup",
|
||||
"check:exports": "attw . --pack --profile node16",
|
||||
"lint": "eslint ./src ./tests --report-unused-disable-directives --max-warnings 0",
|
||||
"format": "prettier --write ./src ./tests",
|
||||
"typecheck": "tsc",
|
||||
"stylecheck": "pnpm lint && prettier --check ./src ./tests",
|
||||
"test": "vitest",
|
||||
"example:basic-server": "tsx examples/basic-server.ts",
|
||||
"pglite-server:dev": "tsx --watch src/scripts/server.ts"
|
||||
}
|
||||
}
|
||||
720
_node_modules/@electric-sql/pglite-socket/src/index.ts
generated
Normal file
720
_node_modules/@electric-sql/pglite-socket/src/index.ts
generated
Normal file
@@ -0,0 +1,720 @@
|
||||
import type { PGlite } from '@electric-sql/pglite'
|
||||
import { createServer, Server, Socket } from 'net'
|
||||
|
||||
// Connection queue timeout in milliseconds
|
||||
export const CONNECTION_QUEUE_TIMEOUT = 60000 // 60 seconds
|
||||
|
||||
/**
|
||||
* Options for creating a PGLiteSocketHandler
|
||||
*/
|
||||
export interface PGLiteSocketHandlerOptions {
|
||||
/** The PGlite database instance */
|
||||
db: PGlite
|
||||
/** Whether to close the socket when detached (default: false) */
|
||||
closeOnDetach?: boolean
|
||||
/** Print the incoming and outgoing data to the console in hex and ascii */
|
||||
inspect?: boolean
|
||||
/** Enable debug logging of method calls */
|
||||
debug?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Low-level handler for a single socket connection to PGLite
|
||||
* Handles the raw protocol communication between a socket and PGLite
|
||||
*/
|
||||
export class PGLiteSocketHandler extends EventTarget {
|
||||
readonly db: PGlite
|
||||
private socket: Socket | null = null
|
||||
private active = false
|
||||
private closeOnDetach: boolean
|
||||
private resolveLock?: () => void
|
||||
private rejectLock?: (err: Error) => void
|
||||
private inspect: boolean
|
||||
private debug: boolean
|
||||
private readonly id: number
|
||||
|
||||
// Static counter for generating unique handler IDs
|
||||
private static nextHandlerId = 1
|
||||
|
||||
/**
|
||||
* Create a new PGLiteSocketHandler
|
||||
* @param options Options for the handler
|
||||
*/
|
||||
constructor(options: PGLiteSocketHandlerOptions) {
|
||||
super()
|
||||
this.db = options.db
|
||||
this.closeOnDetach = options.closeOnDetach ?? false
|
||||
this.inspect = options.inspect ?? false
|
||||
this.debug = options.debug ?? false
|
||||
this.id = PGLiteSocketHandler.nextHandlerId++
|
||||
|
||||
this.log('constructor: created new handler')
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the unique ID of this handler
|
||||
*/
|
||||
public get handlerId(): number {
|
||||
return this.id
|
||||
}
|
||||
|
||||
/**
|
||||
* Log a message if debug is enabled
|
||||
* @private
|
||||
*/
|
||||
private log(message: string, ...args: any[]): void {
|
||||
if (this.debug) {
|
||||
console.log(`[PGLiteSocketHandler#${this.id}] ${message}`, ...args)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Attach a socket to this handler
|
||||
* @param socket The socket to attach
|
||||
* @returns this handler instance
|
||||
* @throws Error if a socket is already attached
|
||||
*/
|
||||
public async attach(socket: Socket): Promise<PGLiteSocketHandler> {
|
||||
this.log(
|
||||
`attach: attaching socket from ${socket.remoteAddress}:${socket.remotePort}`,
|
||||
)
|
||||
|
||||
if (this.socket) {
|
||||
throw new Error('Socket already attached')
|
||||
}
|
||||
|
||||
this.socket = socket
|
||||
this.active = true
|
||||
|
||||
// Ensure the PGlite instance is ready
|
||||
this.log(`attach: waiting for PGlite to be ready`)
|
||||
await this.db.waitReady
|
||||
|
||||
// Hold the lock on the PGlite instance
|
||||
this.log(`attach: acquiring exclusive lock on PGlite instance`)
|
||||
await new Promise<void>((resolve) => {
|
||||
this.db.runExclusive(() => {
|
||||
// Ensure we have the lock on the PGlite instance
|
||||
resolve()
|
||||
|
||||
// Use a promise to hold the lock on the PGlite instance
|
||||
// this can be resolved or rejected by the handler to release the lock
|
||||
return new Promise<void>((resolveLock, rejectLock) => {
|
||||
this.resolveLock = resolveLock
|
||||
this.rejectLock = rejectLock
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// Setup event handlers
|
||||
this.log(`attach: setting up socket event handlers`)
|
||||
socket.on('data', async (data) => {
|
||||
try {
|
||||
const result = await this.handleData(data)
|
||||
this.log(`socket on data sent: ${result} bytes`)
|
||||
} catch (err) {
|
||||
this.log('socket on data error: ', err)
|
||||
}
|
||||
})
|
||||
socket.on('error', (err) => this.handleError(err))
|
||||
socket.on('close', () => this.handleClose())
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
/**
|
||||
* Detach the current socket from this handler
|
||||
* @param close Whether to close the socket when detaching (overrides constructor option)
|
||||
* @returns this handler instance
|
||||
*/
|
||||
public detach(close?: boolean): PGLiteSocketHandler {
|
||||
this.log(`detach: detaching socket, close=${close ?? this.closeOnDetach}`)
|
||||
|
||||
if (!this.socket) {
|
||||
this.log(`detach: no socket attached, nothing to do`)
|
||||
return this
|
||||
}
|
||||
|
||||
// Remove all listeners
|
||||
this.socket.removeAllListeners('data')
|
||||
this.socket.removeAllListeners('error')
|
||||
this.socket.removeAllListeners('close')
|
||||
|
||||
// Close the socket if requested
|
||||
if (close ?? this.closeOnDetach) {
|
||||
if (this.socket.writable) {
|
||||
this.log(`detach: closing socket`)
|
||||
this.socket.end()
|
||||
this.socket.destroy()
|
||||
}
|
||||
}
|
||||
|
||||
// Release the lock on the PGlite instance
|
||||
this.log(`detach: releasing exclusive lock on PGlite instance`)
|
||||
this.resolveLock?.()
|
||||
|
||||
this.socket = null
|
||||
this.active = false
|
||||
return this
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a socket is currently attached
|
||||
*/
|
||||
public get isAttached(): boolean {
|
||||
return this.socket !== null
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle incoming data from the socket
|
||||
*/
|
||||
private async handleData(data: Buffer): Promise<number> {
|
||||
if (!this.socket || !this.active) {
|
||||
this.log(`handleData: no active socket, ignoring data`)
|
||||
return new Promise((_, reject) => reject(`no active socket`))
|
||||
}
|
||||
|
||||
this.log(`handleData: received ${data.length} bytes`)
|
||||
|
||||
// Print the incoming data to the console
|
||||
this.inspectData('incoming', data)
|
||||
|
||||
try {
|
||||
// Process the raw protocol data
|
||||
this.log(`handleData: sending data to PGlite for processing`)
|
||||
const result = await this.db.execProtocolRaw(new Uint8Array(data))
|
||||
|
||||
this.log(`handleData: received ${result.length} bytes from PGlite`)
|
||||
|
||||
// Print the outgoing data to the console
|
||||
this.inspectData('outgoing', result)
|
||||
|
||||
// Send the result back if the socket is still connected
|
||||
if (this.socket && this.socket.writable && this.active) {
|
||||
if (result.length <= 0) {
|
||||
this.log(`handleData: cowardly refusing to send empty packet`)
|
||||
return new Promise((_, reject) => reject('no data'))
|
||||
}
|
||||
|
||||
const promise = new Promise<number>((resolve, reject) => {
|
||||
this.log(`handleData: writing response to socket`)
|
||||
if (this.socket) {
|
||||
this.socket.write(Buffer.from(result), (err?: Error) => {
|
||||
if (err) {
|
||||
reject(`Error while writing to the socket ${err.toString()}`)
|
||||
} else {
|
||||
resolve(result.length)
|
||||
}
|
||||
})
|
||||
} else {
|
||||
reject(`No socket`)
|
||||
}
|
||||
})
|
||||
|
||||
// Emit data event with byte sizes
|
||||
this.dispatchEvent(
|
||||
new CustomEvent('data', {
|
||||
detail: { incoming: data.length, outgoing: result.length },
|
||||
}),
|
||||
)
|
||||
return promise
|
||||
} else {
|
||||
this.log(
|
||||
`handleData: socket no longer writable or active, discarding response`,
|
||||
)
|
||||
return new Promise((_, reject) =>
|
||||
reject(`No socket, not active or not writeable`),
|
||||
)
|
||||
}
|
||||
} catch (err) {
|
||||
this.log(`handleData: error processing data:`, err)
|
||||
this.handleError(err as Error)
|
||||
return new Promise((_, reject) =>
|
||||
reject(`Error while processing data ${(err as Error).toString()}`),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle errors from the socket
|
||||
*/
|
||||
private handleError(err: Error): void {
|
||||
this.log(`handleError:`, err)
|
||||
|
||||
// Emit error event
|
||||
this.dispatchEvent(new CustomEvent('error', { detail: err }))
|
||||
|
||||
// Reject the lock on the PGlite instance
|
||||
this.log(`handleError: rejecting exclusive lock on PGlite instance`)
|
||||
this.rejectLock?.(err)
|
||||
this.resolveLock = undefined
|
||||
this.rejectLock = undefined
|
||||
|
||||
// Close the connection on error
|
||||
this.detach(true)
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle socket close event
|
||||
*/
|
||||
private handleClose(): void {
|
||||
this.log(`handleClose: socket closed`)
|
||||
|
||||
this.dispatchEvent(new CustomEvent('close'))
|
||||
this.detach(false) // Already closed, just clean up
|
||||
}
|
||||
|
||||
/**
|
||||
* Print data in hex and ascii to the console
|
||||
*/
|
||||
private inspectData(
|
||||
direction: 'incoming' | 'outgoing',
|
||||
data: Buffer | Uint8Array,
|
||||
): void {
|
||||
if (!this.inspect) return
|
||||
console.log('-'.repeat(75))
|
||||
if (direction === 'incoming') {
|
||||
console.log('-> incoming', data.length, 'bytes')
|
||||
} else {
|
||||
console.log('<- outgoing', data.length, 'bytes')
|
||||
}
|
||||
|
||||
// Process 16 bytes per line
|
||||
for (let offset = 0; offset < data.length; offset += 16) {
|
||||
// Calculate current chunk size (may be less than 16 for the last chunk)
|
||||
const chunkSize = Math.min(16, data.length - offset)
|
||||
|
||||
// Build the hex representation
|
||||
let hexPart = ''
|
||||
for (let i = 0; i < 16; i++) {
|
||||
if (i < chunkSize) {
|
||||
const byte = data[offset + i]
|
||||
hexPart += byte.toString(16).padStart(2, '0') + ' '
|
||||
} else {
|
||||
hexPart += ' ' // 3 spaces for missing bytes
|
||||
}
|
||||
}
|
||||
|
||||
// Build the ASCII representation
|
||||
let asciiPart = ''
|
||||
for (let i = 0; i < chunkSize; i++) {
|
||||
const byte = data[offset + i]
|
||||
// Use printable characters (32-126), replace others with a dot
|
||||
asciiPart += byte >= 32 && byte <= 126 ? String.fromCharCode(byte) : '.'
|
||||
}
|
||||
|
||||
// Print the line with offset in hex, hex values, and ASCII representation
|
||||
console.log(
|
||||
`${offset.toString(16).padStart(8, '0')} ${hexPart} ${asciiPart}`,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents a queued connection with timeout
|
||||
*/
|
||||
interface QueuedConnection {
|
||||
socket: Socket
|
||||
clientInfo: {
|
||||
clientAddress: string
|
||||
clientPort: number
|
||||
}
|
||||
timeoutId: NodeJS.Timeout
|
||||
}
|
||||
|
||||
/**
|
||||
* Options for creating a PGLiteSocketServer
|
||||
*/
|
||||
export interface PGLiteSocketServerOptions {
|
||||
/** The PGlite database instance */
|
||||
db: PGlite
|
||||
/** The port to listen on (default: 5432) */
|
||||
port?: number
|
||||
/** The host to bind to (default: 127.0.0.1) */
|
||||
host?: string
|
||||
/** Unix socket path to bind to (default: undefined). If specified, takes precedence over host:port */
|
||||
path?: string
|
||||
/** Print the incoming and outgoing data to the console in hex and ascii */
|
||||
inspect?: boolean
|
||||
/** Connection queue timeout in milliseconds (default: 10000) */
|
||||
connectionQueueTimeout?: number
|
||||
/** Enable debug logging of method calls */
|
||||
debug?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* High-level server that manages socket connections to PGLite
|
||||
* Creates and manages a TCP server and handles client connections
|
||||
*/
|
||||
export class PGLiteSocketServer extends EventTarget {
|
||||
readonly db: PGlite
|
||||
private server: Server | null = null
|
||||
private port?: number
|
||||
private host?: string
|
||||
private path?: string
|
||||
private active = false
|
||||
private inspect: boolean
|
||||
private debug: boolean
|
||||
private connectionQueueTimeout: number
|
||||
private activeHandler: PGLiteSocketHandler | null = null
|
||||
private connectionQueue: QueuedConnection[] = []
|
||||
private handlerCount: number = 0
|
||||
|
||||
/**
|
||||
* Create a new PGLiteSocketServer
|
||||
* @param options Options for the server
|
||||
*/
|
||||
constructor(options: PGLiteSocketServerOptions) {
|
||||
super()
|
||||
this.db = options.db
|
||||
if (options.path) {
|
||||
this.path = options.path
|
||||
} else {
|
||||
if (typeof options.port === 'number') {
|
||||
// Keep port undefined on port 0, will be set by the OS when we start the server.
|
||||
this.port = options.port ?? options.port
|
||||
} else {
|
||||
this.port = 5432
|
||||
}
|
||||
this.host = options.host || '127.0.0.1'
|
||||
}
|
||||
this.inspect = options.inspect ?? false
|
||||
this.debug = options.debug ?? false
|
||||
this.connectionQueueTimeout =
|
||||
options.connectionQueueTimeout ?? CONNECTION_QUEUE_TIMEOUT
|
||||
|
||||
this.log(`constructor: created server on ${this.host}:${this.port}`)
|
||||
this.log(
|
||||
`constructor: connection queue timeout: ${this.connectionQueueTimeout}ms`,
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Log a message if debug is enabled
|
||||
* @private
|
||||
*/
|
||||
private log(message: string, ...args: any[]): void {
|
||||
if (this.debug) {
|
||||
console.log(`[PGLiteSocketServer] ${message}`, ...args)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Start the socket server
|
||||
* @returns Promise that resolves when the server is listening
|
||||
*/
|
||||
public async start(): Promise<void> {
|
||||
this.log(`start: starting server on ${this.getServerConn()}`)
|
||||
|
||||
if (this.server) {
|
||||
throw new Error('Socket server already started')
|
||||
}
|
||||
|
||||
this.active = true
|
||||
this.server = createServer((socket) => this.handleConnection(socket))
|
||||
|
||||
return new Promise<void>((resolve, reject) => {
|
||||
if (!this.server) return reject(new Error('Server not initialized'))
|
||||
|
||||
this.server.on('error', (err) => {
|
||||
this.log(`start: server error:`, err)
|
||||
this.dispatchEvent(new CustomEvent('error', { detail: err }))
|
||||
reject(err)
|
||||
})
|
||||
|
||||
if (this.path) {
|
||||
this.server.listen(this.path, () => {
|
||||
this.log(`start: server listening on ${this.getServerConn()}`)
|
||||
this.dispatchEvent(
|
||||
new CustomEvent('listening', {
|
||||
detail: { path: this.path },
|
||||
}),
|
||||
)
|
||||
resolve()
|
||||
})
|
||||
} else {
|
||||
const server = this.server
|
||||
server.listen(this.port, this.host, () => {
|
||||
const address = server.address()
|
||||
// We are not using pipes, so return type should be AddressInfo
|
||||
if (address === null || typeof address !== 'object') {
|
||||
throw Error('Expected address info')
|
||||
}
|
||||
// Assign the new port number
|
||||
this.port = address.port
|
||||
this.log(`start: server listening on ${this.getServerConn()}`)
|
||||
this.dispatchEvent(
|
||||
new CustomEvent('listening', {
|
||||
detail: { port: this.port, host: this.host },
|
||||
}),
|
||||
)
|
||||
resolve()
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
public getServerConn(): string {
|
||||
if (this.path) return this.path
|
||||
return `${this.host}:${this.port}`
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop the socket server
|
||||
* @returns Promise that resolves when the server is closed
|
||||
*/
|
||||
public async stop(): Promise<void> {
|
||||
this.log(`stop: stopping server`)
|
||||
|
||||
this.active = false
|
||||
|
||||
// Clear connection queue
|
||||
this.log(
|
||||
`stop: clearing connection queue (${this.connectionQueue.length} connections)`,
|
||||
)
|
||||
|
||||
this.connectionQueue.forEach((queuedConn) => {
|
||||
clearTimeout(queuedConn.timeoutId)
|
||||
if (queuedConn.socket.writable) {
|
||||
this.log(
|
||||
`stop: closing queued connection from ${queuedConn.clientInfo.clientAddress}:${queuedConn.clientInfo.clientPort}`,
|
||||
)
|
||||
queuedConn.socket.end()
|
||||
}
|
||||
})
|
||||
this.connectionQueue = []
|
||||
|
||||
// Detach active handler if exists
|
||||
if (this.activeHandler) {
|
||||
this.log(`stop: detaching active handler #${this.activeHandlerId}`)
|
||||
this.activeHandler.detach(true)
|
||||
this.activeHandler = null
|
||||
}
|
||||
|
||||
if (!this.server) {
|
||||
this.log(`stop: server not running, nothing to do`)
|
||||
return Promise.resolve()
|
||||
}
|
||||
|
||||
return new Promise<void>((resolve) => {
|
||||
if (!this.server) return resolve()
|
||||
|
||||
this.server.close(() => {
|
||||
this.log(`stop: server closed`)
|
||||
this.server = null
|
||||
this.dispatchEvent(new CustomEvent('close'))
|
||||
resolve()
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the active handler ID, or null if no active handler
|
||||
*/
|
||||
private get activeHandlerId(): number | null {
|
||||
return this.activeHandler?.handlerId ?? null
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle a new client connection
|
||||
*/
|
||||
private async handleConnection(socket: Socket): Promise<void> {
|
||||
const clientInfo = {
|
||||
clientAddress: socket.remoteAddress || 'unknown',
|
||||
clientPort: socket.remotePort || 0,
|
||||
}
|
||||
|
||||
this.log(
|
||||
`handleConnection: new connection from ${clientInfo.clientAddress}:${clientInfo.clientPort}`,
|
||||
)
|
||||
|
||||
// If server is not active, close the connection immediately
|
||||
if (!this.active) {
|
||||
this.log(`handleConnection: server not active, closing connection`)
|
||||
socket.end()
|
||||
return
|
||||
}
|
||||
|
||||
// If we don't have an active handler or it's not attached, we can use this connection immediately
|
||||
if (!this.activeHandler || !this.activeHandler.isAttached) {
|
||||
this.log(`handleConnection: no active handler, attaching socket directly`)
|
||||
this.dispatchEvent(new CustomEvent('connection', { detail: clientInfo }))
|
||||
await this.attachSocketToNewHandler(socket, clientInfo)
|
||||
return
|
||||
}
|
||||
|
||||
// Otherwise, queue the connection
|
||||
this.log(
|
||||
`handleConnection: active handler #${this.activeHandlerId} exists, queueing connection`,
|
||||
)
|
||||
this.enqueueConnection(socket, clientInfo)
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a connection to the queue
|
||||
*/
|
||||
private enqueueConnection(
|
||||
socket: Socket,
|
||||
clientInfo: { clientAddress: string; clientPort: number },
|
||||
): void {
|
||||
this.log(
|
||||
`enqueueConnection: queueing connection from ${clientInfo.clientAddress}:${clientInfo.clientPort}, timeout: ${this.connectionQueueTimeout}ms`,
|
||||
)
|
||||
|
||||
// Set a timeout for this queued connection
|
||||
const timeoutId = setTimeout(() => {
|
||||
this.log(
|
||||
`enqueueConnection: timeout for connection from ${clientInfo.clientAddress}:${clientInfo.clientPort}`,
|
||||
)
|
||||
|
||||
// Remove from queue
|
||||
this.connectionQueue = this.connectionQueue.filter(
|
||||
(queuedConn) => queuedConn.socket !== socket,
|
||||
)
|
||||
|
||||
// End the connection if it's still open
|
||||
if (socket.writable) {
|
||||
this.log(`enqueueConnection: closing timed out connection`)
|
||||
socket.end()
|
||||
}
|
||||
|
||||
this.dispatchEvent(
|
||||
new CustomEvent('queueTimeout', {
|
||||
detail: { ...clientInfo, queueSize: this.connectionQueue.length },
|
||||
}),
|
||||
)
|
||||
}, this.connectionQueueTimeout)
|
||||
|
||||
// Add to queue
|
||||
this.connectionQueue.push({ socket, clientInfo, timeoutId })
|
||||
|
||||
this.log(
|
||||
`enqueueConnection: connection queued, queue size: ${this.connectionQueue.length}`,
|
||||
)
|
||||
|
||||
this.dispatchEvent(
|
||||
new CustomEvent('queuedConnection', {
|
||||
detail: { ...clientInfo, queueSize: this.connectionQueue.length },
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Process the next connection in the queue
|
||||
*/
|
||||
private processNextInQueue(): void {
|
||||
this.log(
|
||||
`processNextInQueue: processing next connection, queue size: ${this.connectionQueue.length}`,
|
||||
)
|
||||
|
||||
// No connections in queue or server not active
|
||||
if (this.connectionQueue.length === 0 || !this.active) {
|
||||
this.log(
|
||||
`processNextInQueue: no connections in queue or server not active, nothing to do`,
|
||||
)
|
||||
return
|
||||
}
|
||||
|
||||
// Get the next connection
|
||||
const nextConn = this.connectionQueue.shift()
|
||||
if (!nextConn) return
|
||||
|
||||
this.log(
|
||||
`processNextInQueue: processing connection from ${nextConn.clientInfo.clientAddress}:${nextConn.clientInfo.clientPort}`,
|
||||
)
|
||||
|
||||
// Clear the timeout
|
||||
clearTimeout(nextConn.timeoutId)
|
||||
|
||||
// Check if the socket is still valid
|
||||
if (!nextConn.socket.writable) {
|
||||
this.log(
|
||||
`processNextInQueue: socket no longer writable, skipping to next connection`,
|
||||
)
|
||||
// Socket closed while waiting, process next in queue
|
||||
this.processNextInQueue()
|
||||
return
|
||||
}
|
||||
|
||||
// Attach this socket to a new handler
|
||||
this.attachSocketToNewHandler(nextConn.socket, nextConn.clientInfo).catch(
|
||||
(err) => {
|
||||
this.log(`processNextInQueue: error attaching socket:`, err)
|
||||
this.dispatchEvent(new CustomEvent('error', { detail: err }))
|
||||
// Try the next connection
|
||||
this.processNextInQueue()
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Attach a socket to a new handler
|
||||
*/
|
||||
private async attachSocketToNewHandler(
|
||||
socket: Socket,
|
||||
clientInfo: { clientAddress: string; clientPort: number },
|
||||
): Promise<void> {
|
||||
this.handlerCount++
|
||||
|
||||
this.log(
|
||||
`attachSocketToNewHandler: creating new handler for ${clientInfo.clientAddress}:${clientInfo.clientPort} (handler #${this.handlerCount})`,
|
||||
)
|
||||
|
||||
// Create a new handler for this connection
|
||||
const handler = new PGLiteSocketHandler({
|
||||
db: this.db,
|
||||
closeOnDetach: true,
|
||||
inspect: this.inspect,
|
||||
debug: this.debug,
|
||||
})
|
||||
|
||||
// Forward error events from the handler
|
||||
handler.addEventListener('error', (event) => {
|
||||
this.log(
|
||||
`handler #${handler.handlerId}: error from handler:`,
|
||||
(event as CustomEvent<Error>).detail,
|
||||
)
|
||||
this.dispatchEvent(
|
||||
new CustomEvent('error', {
|
||||
detail: (event as CustomEvent<Error>).detail,
|
||||
}),
|
||||
)
|
||||
})
|
||||
|
||||
// Handle close event to process next queued connection
|
||||
handler.addEventListener('close', () => {
|
||||
this.log(`handler #${handler.handlerId}: closed`)
|
||||
|
||||
// If this is our active handler, clear it
|
||||
if (this.activeHandler === handler) {
|
||||
this.log(
|
||||
`handler #${handler.handlerId}: was active handler, processing next connection in queue`,
|
||||
)
|
||||
this.activeHandler = null
|
||||
// Process next connection in queue
|
||||
this.processNextInQueue()
|
||||
}
|
||||
})
|
||||
|
||||
try {
|
||||
// Set as active handler
|
||||
this.activeHandler = handler
|
||||
|
||||
this.log(`handler #${handler.handlerId}: attaching socket`)
|
||||
|
||||
// Attach the socket to the handler
|
||||
await handler.attach(socket)
|
||||
|
||||
this.dispatchEvent(new CustomEvent('connection', { detail: clientInfo }))
|
||||
} catch (err) {
|
||||
// If there was an error attaching, clean up
|
||||
this.log(`handler #${handler.handlerId}: error attaching socket:`, err)
|
||||
this.activeHandler = null
|
||||
if (socket.writable) {
|
||||
socket.end()
|
||||
}
|
||||
throw err
|
||||
}
|
||||
}
|
||||
}
|
||||
415
_node_modules/@electric-sql/pglite-socket/src/scripts/server.ts
generated
Normal file
415
_node_modules/@electric-sql/pglite-socket/src/scripts/server.ts
generated
Normal file
@@ -0,0 +1,415 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
import { PGlite, DebugLevel } from '@electric-sql/pglite'
|
||||
import type { Extension, Extensions } from '@electric-sql/pglite'
|
||||
import { PGLiteSocketServer } from '../index'
|
||||
import { parseArgs } from 'node:util'
|
||||
import { spawn, ChildProcess } from 'node:child_process'
|
||||
|
||||
// Define command line argument options.
// NOTE(review): the `help` keys below are not part of Node's parseArgs config
// schema (it reads type/short/default/multiple) and appear to be ignored at
// runtime; they serve as inline documentation — keep them in sync with the
// `help` usage text defined further down.
const args = parseArgs({
  options: {
    db: {
      type: 'string',
      short: 'd',
      default: 'memory://',
      help: 'Database path (relative or absolute). Use memory:// for in-memory database.',
    },
    port: {
      type: 'string',
      short: 'p',
      default: '5432',
      help: 'Port to listen on',
    },
    host: {
      type: 'string',
      short: 'h',
      default: '127.0.0.1',
      help: 'Host to bind to',
    },
    path: {
      type: 'string',
      short: 'u',
      default: undefined,
      help: 'unix socket to bind to. Takes precedence over host:port',
    },
    debug: {
      type: 'string',
      short: 'v',
      default: '0',
      help: 'Debug level (0-5)',
    },
    extensions: {
      type: 'string',
      short: 'e',
      default: undefined,
      help: 'Comma-separated list of extensions to load (e.g., vector,pgcrypto)',
    },
    run: {
      type: 'string',
      short: 'r',
      default: undefined,
      help: 'Command to run after server starts',
    },
    'include-database-url': {
      type: 'boolean',
      default: false,
      help: 'Include DATABASE_URL in the environment of the subprocess',
    },
    'shutdown-timeout': {
      type: 'string',
      default: '5000',
      help: 'Timeout in milliseconds for graceful subprocess shutdown (default: 5000)',
    },
    help: {
      type: 'boolean',
      short: '?',
      default: false,
      help: 'Show help',
    },
  },
})
|
||||
|
||||
// Usage text printed when --help / -? is passed (see main()).
// This is runtime output — keep the option list in sync with parseArgs above.
const help = `PGlite Socket Server
Usage: pglite-server [options]

Options:
  -d, --db=PATH             Database path (default: memory://)
  -p, --port=PORT           Port to listen on (default: 5432)
  -h, --host=HOST           Host to bind to (default: 127.0.0.1)
  -u, --path=UNIX           Unix socket to bind to (default: undefined). Takes precedence over host:port
  -v, --debug=LEVEL         Debug level 0-5 (default: 0)
  -e, --extensions=LIST     Comma-separated list of extensions to load
                            Formats: vector, pgcrypto (built-in/contrib)
                                     @org/package/path:exportedName (npm package)
  -r, --run=COMMAND         Command to run after server starts
  --include-database-url    Include DATABASE_URL in subprocess environment
  --shutdown-timeout=MS     Timeout for graceful subprocess shutdown in ms (default: 5000)
`
|
||||
|
||||
// Normalized CLI configuration consumed by PGLiteServerRunner.
interface ServerConfig {
  /** Database location; 'memory://' selects an in-memory database */
  dbPath: string
  /** TCP port to listen on (ignored when `path` is set) */
  port: number
  /** Host/interface to bind (ignored when `path` is set) */
  host: string
  /** Optional unix socket path; takes precedence over host:port */
  path?: string
  /** PGlite debug verbosity (0-5) */
  debugLevel: DebugLevel
  /** Extension specifiers from -e/--extensions, split on commas */
  extensionNames?: string[]
  /** Optional command to spawn once the server is listening */
  runCommand?: string
  /** Whether to export DATABASE_URL into the subprocess environment */
  includeDatabaseUrl: boolean
  /** Grace period (ms) before force-killing the subprocess on shutdown */
  shutdownTimeout: number
}
|
||||
|
||||
class PGLiteServerRunner {
|
||||
private config: ServerConfig
|
||||
private db: PGlite | null = null
|
||||
private server: PGLiteSocketServer | null = null
|
||||
private subprocessManager: SubprocessManager | null = null
|
||||
|
||||
constructor(config: ServerConfig) {
|
||||
this.config = config
|
||||
}
|
||||
|
||||
static parseConfig(): ServerConfig {
|
||||
const extensionsArg = args.values.extensions as string | undefined
|
||||
return {
|
||||
dbPath: args.values.db as string,
|
||||
port: parseInt(args.values.port as string, 10),
|
||||
host: args.values.host as string,
|
||||
path: args.values.path as string,
|
||||
debugLevel: parseInt(args.values.debug as string, 10) as DebugLevel,
|
||||
extensionNames: extensionsArg
|
||||
? extensionsArg.split(',').map((e) => e.trim())
|
||||
: undefined,
|
||||
runCommand: args.values.run as string,
|
||||
includeDatabaseUrl: args.values['include-database-url'] as boolean,
|
||||
shutdownTimeout: parseInt(args.values['shutdown-timeout'] as string, 10),
|
||||
}
|
||||
}
|
||||
|
||||
private createDatabaseUrl(): string {
|
||||
const { host, port, path } = this.config
|
||||
|
||||
if (path) {
|
||||
// Unix socket connection
|
||||
const socketDir = path.endsWith('/.s.PGSQL.5432')
|
||||
? path.slice(0, -13)
|
||||
: path
|
||||
return `postgresql://postgres:postgres@/postgres?host=${encodeURIComponent(socketDir)}`
|
||||
} else {
|
||||
// TCP connection
|
||||
return `postgresql://postgres:postgres@${host}:${port}/postgres`
|
||||
}
|
||||
}
|
||||
|
||||
private async importExtensions(): Promise<Extensions | undefined> {
|
||||
if (!this.config.extensionNames?.length) {
|
||||
return undefined
|
||||
}
|
||||
|
||||
const extensions: Extensions = {}
|
||||
|
||||
// Built-in extensions that are not in contrib
|
||||
const builtInExtensions = [
|
||||
'vector',
|
||||
'live',
|
||||
'pg_hashids',
|
||||
'pg_ivm',
|
||||
'pg_uuidv7',
|
||||
'pgtap',
|
||||
]
|
||||
|
||||
for (const name of this.config.extensionNames) {
|
||||
let ext: Extension | null = null
|
||||
|
||||
try {
|
||||
// Check if this is a custom package path (contains ':')
|
||||
// Format: @org/package/path:exportedName or package/path:exportedName
|
||||
if (name.includes(':')) {
|
||||
const [packagePath, exportName] = name.split(':')
|
||||
if (!packagePath || !exportName) {
|
||||
throw new Error(
|
||||
`Invalid extension format '${name}'. Expected: package/path:exportedName`,
|
||||
)
|
||||
}
|
||||
const mod = await import(packagePath)
|
||||
ext = mod[exportName] as Extension
|
||||
if (ext) {
|
||||
extensions[exportName] = ext
|
||||
console.log(
|
||||
`Imported extension '${exportName}' from '${packagePath}'`,
|
||||
)
|
||||
}
|
||||
} else if (builtInExtensions.includes(name)) {
|
||||
// Built-in extension (e.g., @electric-sql/pglite/vector)
|
||||
const mod = await import(`@electric-sql/pglite/${name}`)
|
||||
ext = mod[name] as Extension
|
||||
if (ext) {
|
||||
extensions[name] = ext
|
||||
console.log(`Imported extension: ${name}`)
|
||||
}
|
||||
} else {
|
||||
// Try contrib first (e.g., @electric-sql/pglite/contrib/pgcrypto)
|
||||
try {
|
||||
const mod = await import(`@electric-sql/pglite/contrib/${name}`)
|
||||
ext = mod[name] as Extension
|
||||
} catch {
|
||||
// Fall back to external package (e.g., @electric-sql/pglite-<extension>)
|
||||
const mod = await import(`@electric-sql/pglite-${name}`)
|
||||
ext = mod[name] as Extension
|
||||
}
|
||||
if (ext) {
|
||||
extensions[name] = ext
|
||||
console.log(`Imported extension: ${name}`)
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`Failed to import extension '${name}':`, error)
|
||||
throw new Error(`Failed to import extension '${name}'`)
|
||||
}
|
||||
}
|
||||
|
||||
return Object.keys(extensions).length > 0 ? extensions : undefined
|
||||
}
|
||||
|
||||
private async initializeDatabase(): Promise<void> {
|
||||
console.log(`Initializing PGLite with database: ${this.config.dbPath}`)
|
||||
console.log(`Debug level: ${this.config.debugLevel}`)
|
||||
|
||||
const extensions = await this.importExtensions()
|
||||
|
||||
this.db = new PGlite(this.config.dbPath, {
|
||||
debug: this.config.debugLevel,
|
||||
extensions,
|
||||
})
|
||||
await this.db.waitReady
|
||||
console.log('PGlite database initialized')
|
||||
}
|
||||
|
||||
private setupServerEventHandlers(): void {
|
||||
if (!this.server || !this.subprocessManager) {
|
||||
throw new Error('Server or subprocess manager not initialized')
|
||||
}
|
||||
|
||||
this.server.addEventListener('listening', (event) => {
|
||||
const detail = (
|
||||
event as CustomEvent<{ port: number; host: string } | { host: string }>
|
||||
).detail
|
||||
console.log(`PGLiteSocketServer listening on ${JSON.stringify(detail)}`)
|
||||
|
||||
// Run the command after server starts listening
|
||||
if (this.config.runCommand && this.subprocessManager) {
|
||||
const databaseUrl = this.createDatabaseUrl()
|
||||
this.subprocessManager.spawn(
|
||||
this.config.runCommand,
|
||||
databaseUrl,
|
||||
this.config.includeDatabaseUrl,
|
||||
)
|
||||
}
|
||||
})
|
||||
|
||||
this.server.addEventListener('connection', (event) => {
|
||||
const { clientAddress, clientPort } = (
|
||||
event as CustomEvent<{ clientAddress: string; clientPort: number }>
|
||||
).detail
|
||||
console.log(`Client connected from ${clientAddress}:${clientPort}`)
|
||||
})
|
||||
|
||||
this.server.addEventListener('error', (event) => {
|
||||
const error = (event as CustomEvent<Error>).detail
|
||||
console.error('Socket server error:', error)
|
||||
})
|
||||
}
|
||||
|
||||
private setupSignalHandlers(): void {
|
||||
process.on('SIGINT', () => this.shutdown())
|
||||
process.on('SIGTERM', () => this.shutdown())
|
||||
}
|
||||
|
||||
async start(): Promise<void> {
|
||||
try {
|
||||
// Initialize database
|
||||
await this.initializeDatabase()
|
||||
|
||||
if (!this.db) {
|
||||
throw new Error('Database initialization failed')
|
||||
}
|
||||
|
||||
// Create and setup the socket server
|
||||
this.server = new PGLiteSocketServer({
|
||||
db: this.db,
|
||||
port: this.config.port,
|
||||
host: this.config.host,
|
||||
path: this.config.path,
|
||||
inspect: this.config.debugLevel > 0,
|
||||
})
|
||||
|
||||
// Create subprocess manager
|
||||
this.subprocessManager = new SubprocessManager((exitCode) => {
|
||||
this.shutdown(exitCode)
|
||||
})
|
||||
|
||||
// Setup event handlers
|
||||
this.setupServerEventHandlers()
|
||||
this.setupSignalHandlers()
|
||||
|
||||
// Start the server
|
||||
await this.server.start()
|
||||
} catch (error) {
|
||||
console.error('Failed to start PGLiteSocketServer:', error)
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
async shutdown(exitCode: number = 0): Promise<void> {
|
||||
console.log('\nShutting down PGLiteSocketServer...')
|
||||
|
||||
// Terminate subprocess if running
|
||||
if (this.subprocessManager) {
|
||||
this.subprocessManager.terminate(this.config.shutdownTimeout)
|
||||
}
|
||||
|
||||
// Stop server
|
||||
if (this.server) {
|
||||
await this.server.stop()
|
||||
}
|
||||
|
||||
// Close database
|
||||
if (this.db) {
|
||||
await this.db.close()
|
||||
}
|
||||
|
||||
console.log('Server stopped')
|
||||
process.exit(exitCode)
|
||||
}
|
||||
}
|
||||
|
||||
class SubprocessManager {
|
||||
private childProcess: ChildProcess | null = null
|
||||
private onExit: (code: number) => void
|
||||
|
||||
constructor(onExit: (code: number) => void) {
|
||||
this.onExit = onExit
|
||||
}
|
||||
|
||||
get process(): ChildProcess | null {
|
||||
return this.childProcess
|
||||
}
|
||||
|
||||
spawn(
|
||||
command: string,
|
||||
databaseUrl: string,
|
||||
includeDatabaseUrl: boolean,
|
||||
): void {
|
||||
console.log(`Running command: ${command}`)
|
||||
|
||||
// Prepare environment variables
|
||||
const env = { ...process.env }
|
||||
if (includeDatabaseUrl) {
|
||||
env.DATABASE_URL = databaseUrl
|
||||
console.log(`Setting DATABASE_URL=${databaseUrl}`)
|
||||
}
|
||||
|
||||
// Parse and spawn the command
|
||||
const commandParts = command.trim().split(/\s+/)
|
||||
this.childProcess = spawn(commandParts[0], commandParts.slice(1), {
|
||||
env,
|
||||
stdio: 'inherit',
|
||||
})
|
||||
|
||||
this.childProcess.on('error', (error) => {
|
||||
console.error('Error running command:', error)
|
||||
// If subprocess fails to start, shutdown the server
|
||||
console.log('Subprocess failed to start, shutting down...')
|
||||
this.onExit(1)
|
||||
})
|
||||
|
||||
this.childProcess.on('close', (code) => {
|
||||
console.log(`Command exited with code ${code}`)
|
||||
this.childProcess = null
|
||||
|
||||
// If child process exits with non-zero code, notify parent
|
||||
if (code !== null && code !== 0) {
|
||||
console.log(
|
||||
`Child process failed with exit code ${code}, shutting down...`,
|
||||
)
|
||||
this.onExit(code)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
terminate(timeout: number): void {
|
||||
if (this.childProcess) {
|
||||
console.log('Terminating child process...')
|
||||
this.childProcess.kill('SIGTERM')
|
||||
|
||||
// Give it a moment to exit gracefully, then force kill if needed
|
||||
setTimeout(() => {
|
||||
if (this.childProcess && !this.childProcess.killed) {
|
||||
console.log('Force killing child process...')
|
||||
this.childProcess.kill('SIGKILL')
|
||||
}
|
||||
}, timeout)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Main execution
|
||||
async function main() {
|
||||
// Show help and exit if requested
|
||||
if (args.values.help) {
|
||||
console.log(help)
|
||||
process.exit(0)
|
||||
}
|
||||
|
||||
try {
|
||||
const config = PGLiteServerRunner.parseConfig()
|
||||
const serverRunner = new PGLiteServerRunner(config)
|
||||
await serverRunner.start()
|
||||
} catch (error) {
|
||||
console.error('Unhandled error:', error)
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
// Entry point: kick off the CLI (errors are handled inside main)
main()
|
||||
512
_node_modules/@electric-sql/pglite-socket/tests/index.test.ts
generated
Normal file
512
_node_modules/@electric-sql/pglite-socket/tests/index.test.ts
generated
Normal file
@@ -0,0 +1,512 @@
|
||||
import {
|
||||
describe,
|
||||
it,
|
||||
expect,
|
||||
beforeEach,
|
||||
afterEach,
|
||||
vi,
|
||||
beforeAll,
|
||||
afterAll,
|
||||
} from 'vitest'
|
||||
import { PGlite } from '@electric-sql/pglite'
|
||||
import {
|
||||
PGLiteSocketHandler,
|
||||
PGLiteSocketServer,
|
||||
CONNECTION_QUEUE_TIMEOUT,
|
||||
} from '../src'
|
||||
import { Socket, createConnection } from 'net'
|
||||
import { existsSync } from 'fs'
|
||||
import { unlink } from 'fs/promises'
|
||||
|
||||
// Mock timers for testing timeouts (the connection queue uses setTimeout)
beforeAll(() => {
  vi.useFakeTimers()
})

// Restore real timers so later suites are unaffected
afterAll(() => {
  vi.useRealTimers()
})
|
||||
|
||||
async function testSocket(
|
||||
fn: (socketOptions: {
|
||||
host?: string
|
||||
port?: number
|
||||
path?: string
|
||||
}) => Promise<void>,
|
||||
) {
|
||||
describe('TCP socket server', async () => {
|
||||
await fn({ host: '127.0.0.1', port: 5433 })
|
||||
})
|
||||
describe('unix socket server', async () => {
|
||||
await fn({ path: '/tmp/.s.PGSQL.5432' })
|
||||
})
|
||||
}
|
||||
|
||||
// Create a mock Socket for testing.
// Returns an object that looks enough like net.Socket for the handler code:
// call-tracked lifecycle methods plus a manual emit() so tests can fire
// 'data'/'error'/'close' events on demand.
const createMockSocket = () => {
  // Registered listeners keyed by event name; exposed so tests can inspect
  // or trigger them directly.
  const eventHandlers: Record<string, Array<(data: any) => void>> = {}

  const mockSocket = {
    // Socket methods we need for testing
    removeAllListeners: vi.fn(),
    end: vi.fn(),
    destroy: vi.fn(),
    write: vi.fn(),
    writable: true,
    remoteAddress: '127.0.0.1',
    remotePort: 12345,

    // Mock on method with tracking of handlers
    on: vi
      .fn()
      .mockImplementation((event: string, callback: (data: any) => void) => {
        if (!eventHandlers[event]) {
          eventHandlers[event] = []
        }
        eventHandlers[event].push(callback)
        // net.Socket#on returns the socket for chaining; mirror that here
        return mockSocket
      }),

    // Store event handlers for testing
    eventHandlers,

    // Helper to emit events
    emit(event: string, data: any) {
      if (eventHandlers[event]) {
        eventHandlers[event].forEach((handler) => handler(data))
      }
    },
  }

  // Double cast: the mock deliberately implements only a slice of Socket
  return mockSocket as unknown as Socket
}
|
||||
|
||||
// Unit tests for PGLiteSocketHandler using the mock socket above — no real
// network traffic; lifecycle (attach/detach) and event forwarding only.
describe('PGLiteSocketHandler', () => {
  let db: PGlite
  let handler: PGLiteSocketHandler
  let mockSocket: ReturnType<typeof createMockSocket> & {
    eventHandlers: Record<string, Array<(data: any) => void>>
  }

  beforeEach(async () => {
    // Create a PGlite instance for testing
    db = await PGlite.create()
    handler = new PGLiteSocketHandler({ db })
    mockSocket = createMockSocket() as any
  })

  afterEach(async () => {
    // Ensure handler is detached before closing the database
    if (handler?.isAttached) {
      handler.detach(true)
    }

    // Clean up
    await db.close()
  })

  it('should attach to a socket', async () => {
    // Attach mock socket to handler
    await handler.attach(mockSocket)

    // Check that the socket is attached and all three listeners registered
    expect(handler.isAttached).toBe(true)
    expect(mockSocket.on).toHaveBeenCalledWith('data', expect.any(Function))
    expect(mockSocket.on).toHaveBeenCalledWith('error', expect.any(Function))
    expect(mockSocket.on).toHaveBeenCalledWith('close', expect.any(Function))
  })

  it('should detach from a socket', async () => {
    // First attach
    await handler.attach(mockSocket)
    expect(handler.isAttached).toBe(true)

    // Then detach (false = leave the socket open)
    handler.detach(false)
    expect(handler.isAttached).toBe(false)
    expect(mockSocket.removeAllListeners).toHaveBeenCalled()
  })

  it('should close socket when detaching with close option', async () => {
    // Attach mock socket to handler
    await handler.attach(mockSocket)

    // Detach with close option
    handler.detach(true)
    expect(handler.isAttached).toBe(false)
    expect(mockSocket.end).toHaveBeenCalled()
  })

  it('should reject attaching multiple sockets', async () => {
    // Attach first socket
    await handler.attach(mockSocket)

    // Trying to attach another socket should throw an error
    const anotherMockSocket = createMockSocket()
    await expect(handler.attach(anotherMockSocket)).rejects.toThrow(
      'Socket already attached',
    )
  })

  it('should emit error event when socket has error', async () => {
    // Set up error listener
    const errorHandler = vi.fn()
    handler.addEventListener('error', errorHandler)

    // Attach socket
    await handler.attach(mockSocket)

    // Mock the event handler logic directly instead of triggering actual error handlers
    const customEvent = new CustomEvent('error', {
      detail: { code: 'MOCK_ERROR', message: 'Test socket error' },
    })
    handler.dispatchEvent(customEvent)

    // Verify error handler was called
    expect(errorHandler).toHaveBeenCalled()
  })

  it('should emit close event when socket closes', async () => {
    // Set up close listener
    const closeHandler = vi.fn()
    handler.addEventListener('close', closeHandler)

    // Attach socket
    await handler.attach(mockSocket)

    // Mock the event handler logic directly instead of triggering actual socket handlers
    const customEvent = new CustomEvent('close')
    handler.dispatchEvent(customEvent)

    // Verify close handler was called
    expect(closeHandler).toHaveBeenCalled()
  })
})
|
||||
|
||||
testSocket(async (connOptions) => {
|
||||
describe('PGLiteSocketServer', () => {
|
||||
let db: PGlite
|
||||
let server: PGLiteSocketServer
|
||||
|
||||
beforeEach(async () => {
|
||||
// Create a PGlite instance for testing
|
||||
db = await PGlite.create()
|
||||
if (connOptions.path) {
|
||||
if (existsSync(connOptions.path)) {
|
||||
try {
|
||||
await unlink(connOptions.path)
|
||||
console.log(`Removed old socket at ${connOptions.path}`)
|
||||
} catch (err) {
|
||||
console.log('')
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
// Stop server if running
|
||||
try {
|
||||
await server?.stop()
|
||||
} catch (e) {
|
||||
// Ignore errors during cleanup
|
||||
}
|
||||
|
||||
// Close database
|
||||
await db.close()
|
||||
})
|
||||
|
||||
it('should start and stop server', async () => {
|
||||
// Create server
|
||||
server = new PGLiteSocketServer({
|
||||
db,
|
||||
host: connOptions.host,
|
||||
port: connOptions.port,
|
||||
path: connOptions.path,
|
||||
})
|
||||
|
||||
// Start server
|
||||
await server.start()
|
||||
|
||||
// Try to connect to confirm server is running
|
||||
let client
|
||||
if (connOptions.path) {
|
||||
// unix socket
|
||||
client = createConnection({ path: connOptions.path })
|
||||
} else {
|
||||
if (connOptions.port) {
|
||||
// TCP socket
|
||||
client = createConnection({
|
||||
port: connOptions.port,
|
||||
host: connOptions.host,
|
||||
})
|
||||
} else {
|
||||
throw new Error(
|
||||
'need to specify connOptions.path or connOptions.port',
|
||||
)
|
||||
}
|
||||
}
|
||||
client.on('error', () => {
|
||||
// Ignore connection errors during test
|
||||
})
|
||||
|
||||
await new Promise<void>((resolve) => {
|
||||
client.on('connect', () => {
|
||||
client.end()
|
||||
resolve()
|
||||
})
|
||||
|
||||
// Set timeout to resolve in case connection fails
|
||||
setTimeout(resolve, 100)
|
||||
})
|
||||
|
||||
// Stop server
|
||||
await server.stop()
|
||||
|
||||
// Try to connect again - should fail
|
||||
await expect(
|
||||
new Promise<void>((resolve, reject) => {
|
||||
let failClient
|
||||
if (connOptions.path) {
|
||||
// unix socket
|
||||
failClient = createConnection({ path: connOptions.path })
|
||||
} else {
|
||||
if (connOptions.port) {
|
||||
// TCP socket
|
||||
failClient = createConnection({
|
||||
port: connOptions.port,
|
||||
host: connOptions.host,
|
||||
})
|
||||
} else {
|
||||
throw new Error(
|
||||
'need to specify connOptions.path or connOptions.port',
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
failClient.on('error', () => {
|
||||
// Expected error - connection should fail
|
||||
resolve()
|
||||
})
|
||||
|
||||
failClient.on('connect', () => {
|
||||
failClient.end()
|
||||
reject(new Error('Connection should have failed'))
|
||||
})
|
||||
|
||||
// Set timeout to resolve in case no events fire
|
||||
setTimeout(resolve, 100)
|
||||
}),
|
||||
).resolves.not.toThrow()
|
||||
})
|
||||
|
||||
describe('Connection queuing', () => {
|
||||
// Mock implementation details
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
let handleConnectionSpy: any
|
||||
let processNextInQueueSpy: any
|
||||
let attachSocketToNewHandlerSpy: any
|
||||
|
||||
beforeEach(() => {
|
||||
// Create a server with a short timeout for testing
|
||||
server = new PGLiteSocketServer({
|
||||
db,
|
||||
host: connOptions.host,
|
||||
port: connOptions.port,
|
||||
path: connOptions.path,
|
||||
connectionQueueTimeout: 100, // Very short timeout for testing
|
||||
})
|
||||
|
||||
// Spy on internal methods
|
||||
handleConnectionSpy = vi.spyOn(server as any, 'handleConnection')
|
||||
processNextInQueueSpy = vi.spyOn(server as any, 'processNextInQueue')
|
||||
attachSocketToNewHandlerSpy = vi.spyOn(
|
||||
server as any,
|
||||
'attachSocketToNewHandler',
|
||||
)
|
||||
})
|
||||
|
||||
it('should create a handler for a new connection', async () => {
|
||||
await server.start()
|
||||
|
||||
// Create mock socket
|
||||
const socket1 = createMockSocket()
|
||||
|
||||
// Setup event listener
|
||||
const connectionHandler = vi.fn()
|
||||
server.addEventListener('connection', connectionHandler)
|
||||
|
||||
// Handle connection
|
||||
await (server as any).handleConnection(socket1)
|
||||
|
||||
// Verify handler was created
|
||||
expect(attachSocketToNewHandlerSpy).toHaveBeenCalledWith(
|
||||
socket1,
|
||||
expect.anything(),
|
||||
)
|
||||
expect(connectionHandler).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should queue a second connection when first is active', async () => {
|
||||
await server.start()
|
||||
|
||||
// Setup event listeners
|
||||
const queuedConnectionHandler = vi.fn()
|
||||
server.addEventListener('queuedConnection', queuedConnectionHandler)
|
||||
|
||||
// Create mock sockets
|
||||
const socket1 = createMockSocket()
|
||||
const socket2 = createMockSocket()
|
||||
|
||||
// Handle first connection
|
||||
await (server as any).handleConnection(socket1)
|
||||
|
||||
// The first socket should be attached directly
|
||||
expect(attachSocketToNewHandlerSpy).toHaveBeenCalledWith(
|
||||
socket1,
|
||||
expect.anything(),
|
||||
)
|
||||
|
||||
// Handle second connection - should be queued
|
||||
await (server as any).handleConnection(socket2)
|
||||
|
||||
// The second connection should be queued
|
||||
expect(queuedConnectionHandler).toHaveBeenCalledTimes(1)
|
||||
expect(queuedConnectionHandler).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
detail: expect.objectContaining({
|
||||
queueSize: 1,
|
||||
}),
|
||||
}),
|
||||
)
|
||||
})
|
||||
|
||||
it('should process next connection when current connection closes', async () => {
|
||||
await server.start()
|
||||
|
||||
// Create mock sockets
|
||||
const socket1 = createMockSocket()
|
||||
const socket2 = createMockSocket()
|
||||
|
||||
// Setup event listener
|
||||
const connectionHandler = vi.fn()
|
||||
server.addEventListener('connection', connectionHandler)
|
||||
|
||||
// Handle first connection
|
||||
await (server as any).handleConnection(socket1)
|
||||
|
||||
// Handle second connection (will be queued)
|
||||
await (server as any).handleConnection(socket2)
|
||||
|
||||
// First connection should be active, but clear the handler for next assertions
|
||||
expect(connectionHandler).toHaveBeenCalled()
|
||||
connectionHandler.mockClear()
|
||||
|
||||
// Simulate closing the first connection
|
||||
const activeHandler = (server as any).activeHandler
|
||||
activeHandler.dispatchEvent(new CustomEvent('close'))
|
||||
|
||||
// The next connection should be processed
|
||||
expect(processNextInQueueSpy).toHaveBeenCalled()
|
||||
expect(attachSocketToNewHandlerSpy).toHaveBeenCalledWith(
|
||||
socket2,
|
||||
expect.anything(),
|
||||
)
|
||||
})
|
||||
|
||||
it('should timeout queued connections after specified time', async () => {
|
||||
await server.start()
|
||||
|
||||
// Setup event listeners
|
||||
const queueTimeoutHandler = vi.fn()
|
||||
server.addEventListener('queueTimeout', queueTimeoutHandler)
|
||||
|
||||
// Create mock sockets
|
||||
const socket1 = createMockSocket()
|
||||
const socket2 = createMockSocket()
|
||||
|
||||
// Handle first connection
|
||||
await (server as any).handleConnection(socket1)
|
||||
|
||||
// Handle second connection (will be queued)
|
||||
await (server as any).handleConnection(socket2)
|
||||
|
||||
// Fast-forward time to trigger timeout
|
||||
vi.advanceTimersByTime(1001)
|
||||
|
||||
// The queued connection should timeout
|
||||
expect(queueTimeoutHandler).toHaveBeenCalledTimes(1)
|
||||
expect(socket2.end).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should use default timeout value from CONNECTION_QUEUE_TIMEOUT', async () => {
|
||||
// Create server without specifying timeout
|
||||
const defaultServer = new PGLiteSocketServer({
|
||||
db,
|
||||
host: connOptions.host,
|
||||
port: connOptions.port,
|
||||
path: connOptions.path,
|
||||
})
|
||||
|
||||
// Check that it's using the default timeout
|
||||
expect((defaultServer as any).connectionQueueTimeout).toBe(
|
||||
CONNECTION_QUEUE_TIMEOUT,
|
||||
)
|
||||
})
|
||||
|
||||
it('should clean up queue when stopping the server', async () => {
|
||||
await server.start()
|
||||
|
||||
// Create mock sockets
|
||||
const socket1 = createMockSocket()
|
||||
const socket2 = createMockSocket()
|
||||
|
||||
// Handle first connection
|
||||
await (server as any).handleConnection(socket1)
|
||||
|
||||
// Handle second connection (will be queued)
|
||||
await (server as any).handleConnection(socket2)
|
||||
|
||||
// Stop the server
|
||||
await server.stop()
|
||||
|
||||
// All connections should be closed
|
||||
expect(socket1.end).toHaveBeenCalled()
|
||||
expect(socket2.end).toHaveBeenCalled()
|
||||
|
||||
// Queue should be emptied
|
||||
expect((server as any).connectionQueue).toHaveLength(0)
|
||||
})
|
||||
|
||||
it('should start server with OS-assigned port when port is 0', async () => {
|
||||
server = new PGLiteSocketServer({
|
||||
db,
|
||||
host: connOptions.host,
|
||||
port: 0, // Let OS assign port
|
||||
})
|
||||
|
||||
await server.start()
|
||||
const assignedPort = (server as any).port
|
||||
expect(assignedPort).toBeGreaterThan(1024)
|
||||
|
||||
// Try to connect to confirm server is running
|
||||
const client = createConnection({
|
||||
port: assignedPort,
|
||||
host: connOptions.host,
|
||||
})
|
||||
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
client.on('error', () => {
|
||||
reject(new Error('Connection should have failed'))
|
||||
})
|
||||
client.on('connect', () => {
|
||||
client.end()
|
||||
resolve()
|
||||
})
|
||||
setTimeout(resolve, 100)
|
||||
})
|
||||
|
||||
await server.stop()
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
717
_node_modules/@electric-sql/pglite-socket/tests/query-with-node-pg.test.ts
generated
Normal file
717
_node_modules/@electric-sql/pglite-socket/tests/query-with-node-pg.test.ts
generated
Normal file
@@ -0,0 +1,717 @@
|
||||
import {
|
||||
describe,
|
||||
it,
|
||||
expect,
|
||||
beforeAll,
|
||||
afterAll,
|
||||
beforeEach,
|
||||
afterEach,
|
||||
} from 'vitest'
|
||||
import { Client } from 'pg'
|
||||
import { PGlite } from '@electric-sql/pglite'
|
||||
import { PGLiteSocketServer } from '../src'
|
||||
import { spawn, ChildProcess } from 'node:child_process'
|
||||
import { fileURLToPath } from 'node:url'
|
||||
import { dirname, join } from 'node:path'
|
||||
import fs from 'fs'
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url)
|
||||
const __dirname = dirname(__filename)
|
||||
|
||||
/**
|
||||
* Debug configuration for testing
|
||||
*
|
||||
* To test against a real PostgreSQL server:
|
||||
* - Set DEBUG_TESTS=true as an environment variable
|
||||
* - Optionally set DEBUG_TESTS_REAL_SERVER with a connection URL (defaults to localhost)
|
||||
*
|
||||
* Example:
|
||||
* DEBUG_TESTS=true DEBUG_TESTS_REAL_SERVER=postgres://user:pass@host:port/db npm vitest ./tests/query-with-node-pg.test.ts
|
||||
*/
|
||||
const DEBUG_TESTS = process.env.DEBUG_TESTS === 'true'
|
||||
const DEBUG_TESTS_REAL_SERVER =
|
||||
process.env.DEBUG_TESTS_REAL_SERVER ||
|
||||
'postgres://postgres:postgres@localhost:5432/postgres'
|
||||
const TEST_PORT = 5434
|
||||
|
||||
describe(`PGLite Socket Server`, () => {
|
||||
describe('with node-pg client', () => {
|
||||
let db: PGlite
|
||||
let server: PGLiteSocketServer
|
||||
let client: typeof Client.prototype
|
||||
let connectionConfig: any
|
||||
|
||||
beforeAll(async () => {
|
||||
if (DEBUG_TESTS) {
|
||||
console.log('TESTING WITH REAL POSTGRESQL SERVER')
|
||||
console.log(`Connection URL: ${DEBUG_TESTS_REAL_SERVER}`)
|
||||
} else {
|
||||
console.log('TESTING WITH PGLITE SERVER')
|
||||
|
||||
// Create a PGlite instance
|
||||
db = await PGlite.create()
|
||||
|
||||
// Wait for database to be ready
|
||||
await db.waitReady
|
||||
|
||||
console.log('PGLite database ready')
|
||||
|
||||
// Create and start the server with explicit host
|
||||
server = new PGLiteSocketServer({
|
||||
db,
|
||||
port: TEST_PORT,
|
||||
host: '127.0.0.1',
|
||||
})
|
||||
|
||||
// Add event listeners for debugging
|
||||
server.addEventListener('error', (event) => {
|
||||
console.error('Socket server error:', (event as CustomEvent).detail)
|
||||
})
|
||||
|
||||
server.addEventListener('connection', (event) => {
|
||||
console.log(
|
||||
'Socket connection received:',
|
||||
(event as CustomEvent).detail,
|
||||
)
|
||||
})
|
||||
|
||||
await server.start()
|
||||
console.log(`PGLite Socket Server started on port ${TEST_PORT}`)
|
||||
|
||||
connectionConfig = {
|
||||
host: '127.0.0.1',
|
||||
port: TEST_PORT,
|
||||
database: 'postgres',
|
||||
user: 'postgres',
|
||||
password: 'postgres',
|
||||
// Connection timeout in milliseconds
|
||||
connectionTimeoutMillis: 10000,
|
||||
// Query timeout in milliseconds
|
||||
statement_timeout: 5000,
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
afterAll(async () => {
|
||||
if (!DEBUG_TESTS) {
|
||||
// Stop server if running
|
||||
if (server) {
|
||||
await server.stop()
|
||||
console.log('PGLite Socket Server stopped')
|
||||
}
|
||||
|
||||
// Close database
|
||||
if (db) {
|
||||
await db.close()
|
||||
console.log('PGLite database closed')
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
beforeEach(async () => {
|
||||
// Create pg client instance before each test
|
||||
if (DEBUG_TESTS) {
|
||||
// Direct connection to real PostgreSQL server using URL
|
||||
client = new Client({
|
||||
connectionString: DEBUG_TESTS_REAL_SERVER,
|
||||
connectionTimeoutMillis: 10000,
|
||||
statement_timeout: 5000,
|
||||
})
|
||||
} else {
|
||||
// Connection to PGLite Socket Server
|
||||
client = new Client(connectionConfig)
|
||||
}
|
||||
|
||||
// Connect the client
|
||||
await client.connect()
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
// Clean up any tables created in tests
|
||||
try {
|
||||
await client.query('DROP TABLE IF EXISTS test_users')
|
||||
} catch (e) {
|
||||
console.error('Error cleaning up tables:', e)
|
||||
}
|
||||
|
||||
// Disconnect the client after each test
|
||||
if (client) {
|
||||
await client.end()
|
||||
}
|
||||
})
|
||||
|
||||
it('should execute a basic SELECT query', async () => {
|
||||
const result = await client.query('SELECT 1 as one')
|
||||
expect(result.rows[0].one).toBe(1)
|
||||
})
|
||||
|
||||
it('should create a table', async () => {
|
||||
await client.query(`
|
||||
CREATE TABLE test_users (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
email TEXT,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
)
|
||||
`)
|
||||
|
||||
// Verify table exists by querying the schema
|
||||
const tableCheck = await client.query(`
|
||||
SELECT table_name
|
||||
FROM information_schema.tables
|
||||
WHERE table_schema = 'public' AND table_name = 'test_users'
|
||||
`)
|
||||
|
||||
expect(tableCheck.rows.length).toBe(1)
|
||||
expect(tableCheck.rows[0].table_name).toBe('test_users')
|
||||
})
|
||||
|
||||
it('should insert rows into a table', async () => {
|
||||
// Create table
|
||||
await client.query(`
|
||||
CREATE TABLE test_users (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
email TEXT
|
||||
)
|
||||
`)
|
||||
|
||||
// Insert data
|
||||
const insertResult = await client.query(`
|
||||
INSERT INTO test_users (name, email)
|
||||
VALUES
|
||||
('Alice', 'alice@example.com'),
|
||||
('Bob', 'bob@example.com')
|
||||
RETURNING *
|
||||
`)
|
||||
|
||||
expect(insertResult.rows.length).toBe(2)
|
||||
expect(insertResult.rows[0].name).toBe('Alice')
|
||||
expect(insertResult.rows[1].name).toBe('Bob')
|
||||
|
||||
// Verify data is there
|
||||
const count = await client.query(
|
||||
'SELECT COUNT(*)::int as count FROM test_users',
|
||||
)
|
||||
expect(count.rows[0].count).toBe(2)
|
||||
})
|
||||
|
||||
it('should update rows in a table', async () => {
|
||||
// Create and populate table
|
||||
await client.query(`
|
||||
CREATE TABLE test_users (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
email TEXT
|
||||
)
|
||||
`)
|
||||
|
||||
await client.query(`
|
||||
INSERT INTO test_users (name, email)
|
||||
VALUES ('Alice', 'alice@example.com')
|
||||
`)
|
||||
|
||||
// Update
|
||||
const updateResult = await client.query(`
|
||||
UPDATE test_users
|
||||
SET email = 'alice.new@example.com'
|
||||
WHERE name = 'Alice'
|
||||
RETURNING *
|
||||
`)
|
||||
|
||||
expect(updateResult.rows.length).toBe(1)
|
||||
expect(updateResult.rows[0].email).toBe('alice.new@example.com')
|
||||
})
|
||||
|
||||
it('should delete rows from a table', async () => {
|
||||
// Create and populate table
|
||||
await client.query(`
|
||||
CREATE TABLE test_users (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
email TEXT
|
||||
)
|
||||
`)
|
||||
|
||||
await client.query(`
|
||||
INSERT INTO test_users (name, email)
|
||||
VALUES
|
||||
('Alice', 'alice@example.com'),
|
||||
('Bob', 'bob@example.com')
|
||||
`)
|
||||
|
||||
// Delete
|
||||
const deleteResult = await client.query(`
|
||||
DELETE FROM test_users
|
||||
WHERE name = 'Alice'
|
||||
RETURNING *
|
||||
`)
|
||||
|
||||
expect(deleteResult.rows.length).toBe(1)
|
||||
expect(deleteResult.rows[0].name).toBe('Alice')
|
||||
|
||||
// Verify only Bob remains
|
||||
const remaining = await client.query('SELECT * FROM test_users')
|
||||
expect(remaining.rows.length).toBe(1)
|
||||
expect(remaining.rows[0].name).toBe('Bob')
|
||||
})
|
||||
|
||||
it('should execute operations in a transaction', async () => {
|
||||
// Create table
|
||||
await client.query(`
|
||||
CREATE TABLE test_users (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
balance INTEGER DEFAULT 0
|
||||
)
|
||||
`)
|
||||
|
||||
// Insert initial data
|
||||
await client.query(`
|
||||
INSERT INTO test_users (name, balance)
|
||||
VALUES ('Alice', 100), ('Bob', 50)
|
||||
`)
|
||||
|
||||
// Start a transaction and perform operations
|
||||
await client.query('BEGIN')
|
||||
|
||||
try {
|
||||
// Deduct from Alice
|
||||
await client.query(`
|
||||
UPDATE test_users
|
||||
SET balance = balance - 30
|
||||
WHERE name = 'Alice'
|
||||
`)
|
||||
|
||||
// Add to Bob
|
||||
await client.query(`
|
||||
UPDATE test_users
|
||||
SET balance = balance + 30
|
||||
WHERE name = 'Bob'
|
||||
`)
|
||||
|
||||
// Commit the transaction
|
||||
await client.query('COMMIT')
|
||||
} catch (error) {
|
||||
// Rollback on error
|
||||
await client.query('ROLLBACK')
|
||||
throw error
|
||||
}
|
||||
|
||||
// Verify both operations succeeded
|
||||
const users = await client.query(
|
||||
'SELECT name, balance FROM test_users ORDER BY name',
|
||||
)
|
||||
|
||||
expect(users.rows.length).toBe(2)
|
||||
expect(users.rows[0].name).toBe('Alice')
|
||||
expect(users.rows[0].balance).toBe(70)
|
||||
expect(users.rows[1].name).toBe('Bob')
|
||||
expect(users.rows[1].balance).toBe(80)
|
||||
})
|
||||
|
||||
it('should rollback a transaction on ROLLBACK', async () => {
|
||||
// Create table
|
||||
await client.query(`
|
||||
CREATE TABLE test_users (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
balance INTEGER DEFAULT 0
|
||||
)
|
||||
`)
|
||||
|
||||
// Insert initial data
|
||||
await client.query(`
|
||||
INSERT INTO test_users (name, balance)
|
||||
VALUES ('Alice', 100), ('Bob', 50)
|
||||
`)
|
||||
|
||||
// Get initial balance
|
||||
const initialResult = await client.query(`
|
||||
SELECT balance FROM test_users WHERE name = 'Alice'
|
||||
`)
|
||||
const initialBalance = initialResult.rows[0].balance
|
||||
|
||||
// Start a transaction
|
||||
await client.query('BEGIN')
|
||||
|
||||
try {
|
||||
// Deduct from Alice
|
||||
await client.query(`
|
||||
UPDATE test_users
|
||||
SET balance = balance - 30
|
||||
WHERE name = 'Alice'
|
||||
`)
|
||||
|
||||
// Verify balance is changed within transaction
|
||||
const midResult = await client.query(`
|
||||
SELECT balance FROM test_users WHERE name = 'Alice'
|
||||
`)
|
||||
expect(midResult.rows[0].balance).toBe(70)
|
||||
|
||||
// Explicitly roll back (cancel) the transaction
|
||||
await client.query('ROLLBACK')
|
||||
} catch (error) {
|
||||
await client.query('ROLLBACK')
|
||||
throw error
|
||||
}
|
||||
|
||||
// Verify balance wasn't changed after rollback
|
||||
const finalResult = await client.query(`
|
||||
SELECT balance FROM test_users WHERE name = 'Alice'
|
||||
`)
|
||||
expect(finalResult.rows[0].balance).toBe(initialBalance)
|
||||
})
|
||||
|
||||
it('should rollback a transaction on error', async () => {
|
||||
// Create table
|
||||
await client.query(`
|
||||
CREATE TABLE test_users (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
balance INTEGER DEFAULT 0
|
||||
)
|
||||
`)
|
||||
|
||||
// Insert initial data
|
||||
await client.query(`
|
||||
INSERT INTO test_users (name, balance)
|
||||
VALUES ('Alice', 100), ('Bob', 50)
|
||||
`)
|
||||
|
||||
try {
|
||||
// Start a transaction
|
||||
await client.query('BEGIN')
|
||||
|
||||
// Deduct from Alice
|
||||
await client.query(`
|
||||
UPDATE test_users
|
||||
SET balance = balance - 30
|
||||
WHERE name = 'Alice'
|
||||
`)
|
||||
|
||||
// This will trigger an error
|
||||
await client.query(`
|
||||
UPDATE test_users_nonexistent
|
||||
SET balance = balance + 30
|
||||
WHERE name = 'Bob'
|
||||
`)
|
||||
|
||||
// Should never get here
|
||||
await client.query('COMMIT')
|
||||
} catch (error) {
|
||||
// Expected to fail - rollback transaction
|
||||
await client.query('ROLLBACK').catch(() => {
|
||||
// If the client connection is in a bad state, we just ignore
|
||||
// the rollback error
|
||||
})
|
||||
}
|
||||
|
||||
// Verify Alice's balance was not changed due to rollback
|
||||
const users = await client.query(
|
||||
'SELECT name, balance FROM test_users ORDER BY name',
|
||||
)
|
||||
|
||||
expect(users.rows.length).toBe(2)
|
||||
expect(users.rows[0].name).toBe('Alice')
|
||||
expect(users.rows[0].balance).toBe(100) // Should remain 100 after rollback
|
||||
})
|
||||
|
||||
it('should handle a syntax error', async () => {
|
||||
// Expect syntax error
|
||||
let errorMessage = ''
|
||||
try {
|
||||
await client.query('THIS IS NOT VALID SQL;')
|
||||
} catch (error) {
|
||||
errorMessage = (error as Error).message
|
||||
}
|
||||
|
||||
expect(errorMessage).not.toBe('')
|
||||
expect(errorMessage.toLowerCase()).toContain('syntax error')
|
||||
})
|
||||
|
||||
it('should support cursor-based pagination', async () => {
|
||||
// Create a test table with many rows
|
||||
await client.query(`
|
||||
CREATE TABLE test_users (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
value INTEGER
|
||||
)
|
||||
`)
|
||||
|
||||
// Insert 100 rows using generate_series (server-side generation)
|
||||
await client.query(`
|
||||
INSERT INTO test_users (name, value)
|
||||
SELECT
|
||||
'User ' || i as name,
|
||||
i as value
|
||||
FROM generate_series(1, 100) as i
|
||||
`)
|
||||
|
||||
// Use a cursor to read data in smaller chunks
|
||||
const chunkSize = 10
|
||||
let results: any[] = []
|
||||
let page = 0
|
||||
|
||||
try {
|
||||
// Begin transaction
|
||||
await client.query('BEGIN')
|
||||
|
||||
// Declare a cursor
|
||||
await client.query(
|
||||
'DECLARE user_cursor CURSOR FOR SELECT * FROM test_users ORDER BY id',
|
||||
)
|
||||
|
||||
let hasMoreData = true
|
||||
while (hasMoreData) {
|
||||
// Fetch a batch of results
|
||||
const chunk = await client.query('FETCH 10 FROM user_cursor')
|
||||
|
||||
// If no rows returned, we're done
|
||||
if (chunk.rows.length === 0) {
|
||||
hasMoreData = false
|
||||
continue
|
||||
}
|
||||
|
||||
// Process this chunk
|
||||
page++
|
||||
|
||||
// Add to our results array
|
||||
results = [...results, ...chunk.rows]
|
||||
|
||||
// Verify each chunk has correct data (except possibly the last one)
|
||||
if (chunk.rows.length === chunkSize) {
|
||||
expect(chunk.rows.length).toBe(chunkSize)
|
||||
expect(chunk.rows[0].id).toBe((page - 1) * chunkSize + 1)
|
||||
}
|
||||
}
|
||||
|
||||
// Close the cursor
|
||||
await client.query('CLOSE user_cursor')
|
||||
|
||||
// Commit transaction
|
||||
await client.query('COMMIT')
|
||||
} catch (error) {
|
||||
await client.query('ROLLBACK')
|
||||
throw error
|
||||
}
|
||||
|
||||
// Verify we got all 100 records
|
||||
expect(results.length).toBe(100)
|
||||
expect(results[0].name).toBe('User 1')
|
||||
expect(results[99].name).toBe('User 100')
|
||||
|
||||
// Verify we received the expected number of pages
|
||||
expect(page).toBe(Math.ceil(100 / chunkSize))
|
||||
})
|
||||
|
||||
it('should support LISTEN/NOTIFY for pub/sub messaging', async () => {
|
||||
// Set up listener for notifications
|
||||
let receivedPayload = ''
|
||||
const notificationReceived = new Promise<void>((resolve) => {
|
||||
client.on('notification', (msg) => {
|
||||
receivedPayload = msg.payload || ''
|
||||
resolve()
|
||||
})
|
||||
})
|
||||
|
||||
// Start listening
|
||||
await client.query('LISTEN test_channel')
|
||||
|
||||
// Small delay to ensure listener is set up
|
||||
await new Promise((resolve) => setTimeout(resolve, 100))
|
||||
|
||||
// Send a notification
|
||||
await client.query("NOTIFY test_channel, 'Hello from PGlite!'")
|
||||
|
||||
// Wait for the notification to be received with an appropriate timeout
|
||||
const timeoutPromise = new Promise<void>((_, reject) => {
|
||||
setTimeout(() => reject(new Error('Notification timeout')), 2000)
|
||||
})
|
||||
|
||||
await Promise.race([notificationReceived, timeoutPromise]).catch(
|
||||
(error) => {
|
||||
console.error('Notification error:', error)
|
||||
},
|
||||
)
|
||||
|
||||
// Verify the notification was received with the correct payload
|
||||
expect(receivedPayload).toBe('Hello from PGlite!')
|
||||
})
|
||||
})
|
||||
|
||||
describe('with extensions via CLI', () => {
|
||||
const UNIX_SOCKET_DIR_PATH = `/tmp/${Date.now().toString()}`
|
||||
fs.mkdirSync(UNIX_SOCKET_DIR_PATH)
|
||||
const UNIX_SOCKET_PATH = `${UNIX_SOCKET_DIR_PATH}/.s.PGSQL.5432`
|
||||
let serverProcess: ChildProcess | null = null
|
||||
let client: typeof Client.prototype
|
||||
|
||||
beforeAll(async () => {
|
||||
// Start the server with extensions via CLI using tsx for dev or node for dist
|
||||
const serverScript = join(__dirname, '../src/scripts/server.ts')
|
||||
serverProcess = spawn(
|
||||
'npx',
|
||||
[
|
||||
'tsx',
|
||||
serverScript,
|
||||
'--path',
|
||||
UNIX_SOCKET_PATH,
|
||||
'--extensions',
|
||||
'vector,pg_uuidv7,@electric-sql/pglite/pg_hashids:pg_hashids',
|
||||
],
|
||||
{
|
||||
stdio: ['ignore', 'pipe', 'pipe'],
|
||||
},
|
||||
)
|
||||
|
||||
// Wait for server to be ready by checking for "listening" message
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
const timeout = setTimeout(() => {
|
||||
reject(new Error('Server startup timeout'))
|
||||
}, 30000)
|
||||
|
||||
const onData = (data: Buffer) => {
|
||||
const output = data.toString()
|
||||
if (output.includes('listening')) {
|
||||
clearTimeout(timeout)
|
||||
resolve()
|
||||
}
|
||||
}
|
||||
|
||||
serverProcess!.stdout?.on('data', onData)
|
||||
serverProcess!.stderr?.on('data', (data) => {
|
||||
console.error('Server stderr:', data.toString())
|
||||
})
|
||||
|
||||
serverProcess!.on('error', (err) => {
|
||||
clearTimeout(timeout)
|
||||
reject(err)
|
||||
})
|
||||
|
||||
serverProcess!.on('exit', (code) => {
|
||||
if (code !== 0 && code !== null) {
|
||||
clearTimeout(timeout)
|
||||
reject(new Error(`Server exited with code ${code}`))
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
console.log('Server with extensions started')
|
||||
|
||||
client = new Client({
|
||||
host: UNIX_SOCKET_DIR_PATH,
|
||||
database: 'postgres',
|
||||
user: 'postgres',
|
||||
password: 'postgres',
|
||||
connectionTimeoutMillis: 10000,
|
||||
})
|
||||
await client.connect()
|
||||
})
|
||||
|
||||
afterAll(async () => {
|
||||
if (client) {
|
||||
await client.end().catch(() => {})
|
||||
}
|
||||
|
||||
if (serverProcess) {
|
||||
serverProcess.kill('SIGTERM')
|
||||
await new Promise<void>((resolve) => {
|
||||
serverProcess!.on('exit', () => resolve())
|
||||
setTimeout(resolve, 2000) // Force resolve after 2s
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
it('should load and use vector extension', async () => {
|
||||
// Create the extension
|
||||
await client.query('CREATE EXTENSION IF NOT EXISTS vector')
|
||||
|
||||
// Verify extension is loaded
|
||||
const extCheck = await client.query(`
|
||||
SELECT extname FROM pg_extension WHERE extname = 'vector'
|
||||
`)
|
||||
expect(extCheck.rows).toHaveLength(1)
|
||||
expect(extCheck.rows[0].extname).toBe('vector')
|
||||
|
||||
// Create a table with vector column
|
||||
await client.query(`
|
||||
CREATE TABLE test_vectors (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name TEXT,
|
||||
vec vector(3)
|
||||
)
|
||||
`)
|
||||
|
||||
// Insert test data
|
||||
await client.query(`
|
||||
INSERT INTO test_vectors (name, vec) VALUES
|
||||
('test1', '[1,2,3]'),
|
||||
('test2', '[4,5,6]'),
|
||||
('test3', '[7,8,9]')
|
||||
`)
|
||||
|
||||
// Query with vector distance
|
||||
const result = await client.query(`
|
||||
SELECT name, vec, vec <-> '[3,1,2]' AS distance
|
||||
FROM test_vectors
|
||||
ORDER BY distance
|
||||
`)
|
||||
|
||||
expect(result.rows).toHaveLength(3)
|
||||
expect(result.rows[0].name).toBe('test1')
|
||||
expect(result.rows[0].vec).toBe('[1,2,3]')
|
||||
expect(parseFloat(result.rows[0].distance)).toBeCloseTo(2.449, 2)
|
||||
})
|
||||
|
||||
it('should load and use pg_uuidv7 extension', async () => {
|
||||
// Create the extension
|
||||
await client.query('CREATE EXTENSION IF NOT EXISTS pg_uuidv7')
|
||||
|
||||
// Verify extension is loaded
|
||||
const extCheck = await client.query(`
|
||||
SELECT extname FROM pg_extension WHERE extname = 'pg_uuidv7'
|
||||
`)
|
||||
expect(extCheck.rows).toHaveLength(1)
|
||||
expect(extCheck.rows[0].extname).toBe('pg_uuidv7')
|
||||
|
||||
// Generate a UUIDv7
|
||||
const result = await client.query('SELECT uuid_generate_v7() as uuid')
|
||||
expect(result.rows[0].uuid).toHaveLength(36)
|
||||
|
||||
// Test uuid_v7_to_timestamptz function
|
||||
const tsResult = await client.query(`
|
||||
SELECT uuid_v7_to_timestamptz('018570bb-4a7d-7c7e-8df4-6d47afd8c8fc') as ts
|
||||
`)
|
||||
const timestamp = new Date(tsResult.rows[0].ts)
|
||||
expect(timestamp.toISOString()).toBe('2023-01-02T04:26:40.637Z')
|
||||
})
|
||||
|
||||
it('should load and use pg_hashids extension from npm package path', async () => {
|
||||
// Create the extension
|
||||
await client.query('CREATE EXTENSION IF NOT EXISTS pg_hashids')
|
||||
|
||||
// Verify extension is loaded
|
||||
const extCheck = await client.query(`
|
||||
SELECT extname FROM pg_extension WHERE extname = 'pg_hashids'
|
||||
`)
|
||||
expect(extCheck.rows).toHaveLength(1)
|
||||
expect(extCheck.rows[0].extname).toBe('pg_hashids')
|
||||
|
||||
// Test id_encode function
|
||||
const result = await client.query(`
|
||||
SELECT id_encode(1234567, 'salt', 10, 'abcdefghijABCDEFGHIJ1234567890') as hash
|
||||
`)
|
||||
expect(result.rows[0].hash).toBeTruthy()
|
||||
expect(typeof result.rows[0].hash).toBe('string')
|
||||
|
||||
// Test id_decode function (round-trip)
|
||||
const hash = result.rows[0].hash
|
||||
const decodeResult = await client.query(`
|
||||
SELECT id_decode('${hash}', 'salt', 10, 'abcdefghijABCDEFGHIJ1234567890') as id
|
||||
`)
|
||||
expect(decodeResult.rows[0].id[0]).toBe('1234567')
|
||||
})
|
||||
})
|
||||
})
|
||||
678
_node_modules/@electric-sql/pglite-socket/tests/query-with-postgres-js.test.ts
generated
Normal file
678
_node_modules/@electric-sql/pglite-socket/tests/query-with-postgres-js.test.ts
generated
Normal file
@@ -0,0 +1,678 @@
|
||||
import {
|
||||
describe,
|
||||
it,
|
||||
expect,
|
||||
beforeAll,
|
||||
afterAll,
|
||||
beforeEach,
|
||||
afterEach,
|
||||
} from 'vitest'
|
||||
import postgres from 'postgres'
|
||||
import { PGlite } from '@electric-sql/pglite'
|
||||
import { PGLiteSocketServer } from '../src'
|
||||
import { spawn, ChildProcess } from 'node:child_process'
|
||||
import { fileURLToPath } from 'node:url'
|
||||
import { dirname, join } from 'node:path'
|
||||
import fs from 'fs'
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url)
|
||||
const __dirname = dirname(__filename)
|
||||
|
||||
/**
|
||||
* Debug configuration for testing
|
||||
*
|
||||
* To test against a real PostgreSQL server:
|
||||
* - Set DEBUG_TESTS=true as an environment variable
|
||||
* - Optionally set DEBUG_TESTS_REAL_SERVER with a connection URL (defaults to localhost)
|
||||
*
|
||||
* Example:
|
||||
* DEBUG_TESTS=true DEBUG_TESTS_REAL_SERVER=postgres://user:pass@host:port/db npm vitest ./tests/query-with-postgres-js.test.ts
|
||||
*/
|
||||
const DEBUG_LOCAL = process.env.DEBUG_LOCAL === 'true'
|
||||
const DEBUG_TESTS = process.env.DEBUG_TESTS === 'true'
|
||||
const DEBUG_TESTS_REAL_SERVER =
|
||||
process.env.DEBUG_TESTS_REAL_SERVER ||
|
||||
'postgres://postgres:postgres@localhost:5432/postgres'
|
||||
const TEST_PORT = 5434
|
||||
|
||||
describe(`PGLite Socket Server`, () => {
|
||||
describe('with postgres.js client', () => {
|
||||
let db: PGlite
|
||||
let server: PGLiteSocketServer
|
||||
let sql: ReturnType<typeof postgres>
|
||||
let connectionConfig: any
|
||||
|
||||
beforeAll(async () => {
|
||||
if (DEBUG_TESTS) {
|
||||
console.log('TESTING WITH REAL POSTGRESQL SERVER')
|
||||
console.log(`Connection URL: ${DEBUG_TESTS_REAL_SERVER}`)
|
||||
} else {
|
||||
console.log('TESTING WITH PGLITE SERVER')
|
||||
|
||||
// Create a PGlite instance
|
||||
if (DEBUG_LOCAL) db = await PGlite.create({ debug: '1' })
|
||||
else db = await PGlite.create()
|
||||
|
||||
// Wait for database to be ready
|
||||
await db.waitReady
|
||||
|
||||
console.log('PGLite database ready')
|
||||
|
||||
// Create and start the server with explicit host
|
||||
server = new PGLiteSocketServer({
|
||||
db,
|
||||
port: TEST_PORT,
|
||||
host: '127.0.0.1',
|
||||
inspect: DEBUG_TESTS || DEBUG_LOCAL,
|
||||
})
|
||||
|
||||
// Add event listeners for debugging
|
||||
server.addEventListener('error', (event) => {
|
||||
console.error('Socket server error:', (event as CustomEvent).detail)
|
||||
})
|
||||
|
||||
server.addEventListener('connection', (event) => {
|
||||
console.log(
|
||||
'Socket connection received:',
|
||||
(event as CustomEvent).detail,
|
||||
)
|
||||
})
|
||||
|
||||
await server.start()
|
||||
console.log(`PGLite Socket Server started on port ${TEST_PORT}`)
|
||||
|
||||
connectionConfig = {
|
||||
host: '127.0.0.1',
|
||||
port: TEST_PORT,
|
||||
database: 'postgres',
|
||||
username: 'postgres',
|
||||
password: 'postgres',
|
||||
idle_timeout: 5,
|
||||
connect_timeout: 10,
|
||||
max: 1,
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
afterAll(async () => {
|
||||
if (!DEBUG_TESTS) {
|
||||
// Stop server if running
|
||||
if (server) {
|
||||
await server.stop()
|
||||
console.log('PGLite Socket Server stopped')
|
||||
}
|
||||
|
||||
// Close database
|
||||
if (db) {
|
||||
await db.close()
|
||||
console.log('PGLite database closed')
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
beforeEach(() => {
|
||||
// Create a postgres client instance before each test
|
||||
if (DEBUG_TESTS) {
|
||||
// Direct connection to real PostgreSQL server using URL
|
||||
sql = postgres(DEBUG_TESTS_REAL_SERVER, {
|
||||
idle_timeout: 5,
|
||||
connect_timeout: 10,
|
||||
max: 1,
|
||||
})
|
||||
} else {
|
||||
// Connection to PGLite Socket Server
|
||||
sql = postgres(connectionConfig)
|
||||
}
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
// Clean up any tables created in tests
|
||||
try {
|
||||
await sql`DROP TABLE IF EXISTS test_users`
|
||||
} catch (e) {
|
||||
console.error('Error cleaning up tables:', e)
|
||||
}
|
||||
|
||||
// Disconnect the client after each test
|
||||
if (sql) {
|
||||
await sql.end()
|
||||
}
|
||||
})
|
||||
if (!DEBUG_LOCAL) {
|
||||
it('should execute a basic SELECT query', async () => {
|
||||
const result = await sql`SELECT 1 as one`
|
||||
expect(result[0].one).toBe(1)
|
||||
})
|
||||
|
||||
it('should create a table', async () => {
|
||||
await sql`
|
||||
CREATE TABLE test_users (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
email TEXT,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
)
|
||||
`
|
||||
|
||||
// Verify table exists by querying the schema
|
||||
const tableCheck = await sql`
|
||||
SELECT table_name
|
||||
FROM information_schema.tables
|
||||
WHERE table_schema = 'public' AND table_name = 'test_users'
|
||||
`
|
||||
|
||||
expect(tableCheck.length).toBe(1)
|
||||
expect(tableCheck[0].table_name).toBe('test_users')
|
||||
})
|
||||
|
||||
it('should insert rows into a table', async () => {
|
||||
// Create table
|
||||
await sql`
|
||||
CREATE TABLE test_users (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
email TEXT
|
||||
)
|
||||
`
|
||||
|
||||
// Insert data
|
||||
const insertResult = await sql`
|
||||
INSERT INTO test_users (name, email)
|
||||
VALUES
|
||||
('Alice', 'alice@example.com'),
|
||||
('Bob', 'bob@example.com')
|
||||
RETURNING *
|
||||
`
|
||||
|
||||
expect(insertResult.length).toBe(2)
|
||||
expect(insertResult[0].name).toBe('Alice')
|
||||
expect(insertResult[1].name).toBe('Bob')
|
||||
|
||||
// Verify data is there
|
||||
const count = await sql`SELECT COUNT(*)::int as count FROM test_users`
|
||||
expect(count[0].count).toBe(2)
|
||||
})
|
||||
|
||||
it('should update rows in a table', async () => {
|
||||
// Create and populate table
|
||||
await sql`
|
||||
CREATE TABLE test_users (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
email TEXT
|
||||
)
|
||||
`
|
||||
|
||||
await sql`
|
||||
INSERT INTO test_users (name, email)
|
||||
VALUES ('Alice', 'alice@example.com')
|
||||
`
|
||||
|
||||
// Update
|
||||
const updateResult = await sql`
|
||||
UPDATE test_users
|
||||
SET email = 'alice.new@example.com'
|
||||
WHERE name = 'Alice'
|
||||
RETURNING *
|
||||
`
|
||||
|
||||
expect(updateResult.length).toBe(1)
|
||||
expect(updateResult[0].email).toBe('alice.new@example.com')
|
||||
})
|
||||
|
||||
it('should delete rows from a table', async () => {
|
||||
// Create and populate table
|
||||
await sql`
|
||||
CREATE TABLE test_users (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
email TEXT
|
||||
)
|
||||
`
|
||||
|
||||
await sql`
|
||||
INSERT INTO test_users (name, email)
|
||||
VALUES
|
||||
('Alice', 'alice@example.com'),
|
||||
('Bob', 'bob@example.com')
|
||||
`
|
||||
|
||||
// Delete
|
||||
const deleteResult = await sql`
|
||||
DELETE FROM test_users
|
||||
WHERE name = 'Alice'
|
||||
RETURNING *
|
||||
`
|
||||
|
||||
expect(deleteResult.length).toBe(1)
|
||||
expect(deleteResult[0].name).toBe('Alice')
|
||||
|
||||
// Verify only Bob remains
|
||||
const remaining = await sql`SELECT * FROM test_users`
|
||||
expect(remaining.length).toBe(1)
|
||||
expect(remaining[0].name).toBe('Bob')
|
||||
})
|
||||
|
||||
it('should execute operations in a transaction', async () => {
|
||||
// Create table
|
||||
await sql`
|
||||
CREATE TABLE test_users (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
balance INTEGER DEFAULT 0
|
||||
)
|
||||
`
|
||||
|
||||
// Insert initial data
|
||||
await sql`
|
||||
INSERT INTO test_users (name, balance)
|
||||
VALUES ('Alice', 100), ('Bob', 50)
|
||||
`
|
||||
|
||||
// Start a transaction and perform operations
|
||||
await sql.begin(async (tx) => {
|
||||
// Deduct from Alice
|
||||
await tx`
|
||||
UPDATE test_users
|
||||
SET balance = balance - 30
|
||||
WHERE name = 'Alice'
|
||||
`
|
||||
|
||||
// Add to Bob
|
||||
await tx`
|
||||
UPDATE test_users
|
||||
SET balance = balance + 30
|
||||
WHERE name = 'Bob'
|
||||
`
|
||||
})
|
||||
|
||||
// Verify both operations succeeded
|
||||
const users =
|
||||
await sql`SELECT name, balance FROM test_users ORDER BY name`
|
||||
|
||||
expect(users.length).toBe(2)
|
||||
expect(users[0].name).toBe('Alice')
|
||||
expect(users[0].balance).toBe(70)
|
||||
expect(users[1].name).toBe('Bob')
|
||||
expect(users[1].balance).toBe(80)
|
||||
})
|
||||
|
||||
it('should rollback a transaction on ROLLBACK', async () => {
|
||||
// Create table
|
||||
await sql`
|
||||
CREATE TABLE test_users (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
balance INTEGER DEFAULT 0
|
||||
)
|
||||
`
|
||||
|
||||
// Insert initial data
|
||||
await sql`
|
||||
INSERT INTO test_users (name, balance)
|
||||
VALUES ('Alice', 100), ('Bob', 50)
|
||||
`
|
||||
|
||||
// Get initial balance
|
||||
const initialResult = await sql`
|
||||
SELECT balance FROM test_users WHERE name = 'Alice'
|
||||
`
|
||||
const initialBalance = initialResult[0].balance
|
||||
|
||||
// Start a transaction
|
||||
await sql
|
||||
.begin(async (tx) => {
|
||||
// Deduct from Alice
|
||||
await tx`
|
||||
UPDATE test_users
|
||||
SET balance = balance - 30
|
||||
WHERE name = 'Alice'
|
||||
`
|
||||
|
||||
// Verify balance is changed within transaction
|
||||
const midResult = await tx`
|
||||
SELECT balance FROM test_users WHERE name = 'Alice'
|
||||
`
|
||||
expect(midResult[0].balance).toBe(70)
|
||||
|
||||
// Explicitly roll back (cancel) the transaction
|
||||
throw new Error('Triggering rollback')
|
||||
})
|
||||
.catch(() => {
|
||||
// Expected error to trigger rollback
|
||||
console.log('Transaction was rolled back as expected')
|
||||
})
|
||||
|
||||
// Verify balance wasn't changed after rollback
|
||||
const finalResult = await sql`
|
||||
SELECT balance FROM test_users WHERE name = 'Alice'
|
||||
`
|
||||
expect(finalResult[0].balance).toBe(initialBalance)
|
||||
})
|
||||
|
||||
it('should rollback a transaction on error', async () => {
|
||||
// Create table
|
||||
await sql`
|
||||
CREATE TABLE test_users (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
balance INTEGER DEFAULT 0
|
||||
)
|
||||
`
|
||||
|
||||
// Insert initial data
|
||||
await sql`
|
||||
INSERT INTO test_users (name, balance)
|
||||
VALUES ('Alice', 100), ('Bob', 50)
|
||||
`
|
||||
|
||||
// Start a transaction that will fail
|
||||
try {
|
||||
await sql.begin(async (tx) => {
|
||||
// Deduct from Alice
|
||||
await tx`
|
||||
UPDATE test_users
|
||||
SET balance = balance - 30
|
||||
WHERE name = 'Alice'
|
||||
`
|
||||
|
||||
// This will trigger an error
|
||||
await tx`
|
||||
UPDATE test_users_nonexistent
|
||||
SET balance = balance + 30
|
||||
WHERE name = 'Bob'
|
||||
`
|
||||
})
|
||||
} catch (error) {
|
||||
// Expected to fail
|
||||
}
|
||||
|
||||
// Verify Alice's balance was not changed due to rollback
|
||||
const users =
|
||||
await sql`SELECT name, balance FROM test_users ORDER BY name`
|
||||
|
||||
expect(users.length).toBe(2)
|
||||
expect(users[0].name).toBe('Alice')
|
||||
expect(users[0].balance).toBe(100) // Should remain 100 after rollback
|
||||
})
|
||||
|
||||
it('should handle a syntax error', async () => {
|
||||
// Expect syntax error
|
||||
let errorMessage = ''
|
||||
try {
|
||||
await sql`THIS IS NOT VALID SQL;`
|
||||
} catch (error) {
|
||||
errorMessage = (error as Error).message
|
||||
}
|
||||
|
||||
expect(errorMessage).not.toBe('')
|
||||
expect(errorMessage.toLowerCase()).toContain('syntax error')
|
||||
})
|
||||
|
||||
it('should support cursor-based pagination', async () => {
|
||||
// Create a test table with many rows
|
||||
await sql`
|
||||
CREATE TABLE test_users (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
value INTEGER
|
||||
)
|
||||
`
|
||||
|
||||
// Insert 100 rows using generate_series (server-side generation)
|
||||
await sql`
|
||||
INSERT INTO test_users (name, value)
|
||||
SELECT
|
||||
'User ' || i as name,
|
||||
i as value
|
||||
FROM generate_series(1, 100) as i
|
||||
`
|
||||
|
||||
// Use a cursor to read data in smaller chunks
|
||||
const chunkSize = 10
|
||||
let results: any[] = []
|
||||
let page = 0
|
||||
|
||||
// Use a transaction for cursor operations (cursors must be in transactions)
|
||||
await sql.begin(async (tx) => {
|
||||
// Declare a cursor
|
||||
await tx`DECLARE user_cursor CURSOR FOR SELECT * FROM test_users ORDER BY id`
|
||||
|
||||
let hasMoreData = true
|
||||
while (hasMoreData) {
|
||||
// Fetch a batch of results
|
||||
const chunk = await tx`FETCH 10 FROM user_cursor`
|
||||
|
||||
// If no rows returned, we're done
|
||||
if (chunk.length === 0) {
|
||||
hasMoreData = false
|
||||
continue
|
||||
}
|
||||
|
||||
// Process this chunk
|
||||
page++
|
||||
|
||||
// Add to our results array
|
||||
results = [...results, ...chunk]
|
||||
|
||||
// Verify each chunk has correct data (except possibly the last one)
|
||||
if (chunk.length === chunkSize) {
|
||||
expect(chunk.length).toBe(chunkSize)
|
||||
expect(chunk[0].id).toBe((page - 1) * chunkSize + 1)
|
||||
}
|
||||
}
|
||||
|
||||
// Close the cursor
|
||||
await tx`CLOSE user_cursor`
|
||||
})
|
||||
|
||||
// Verify we got all 100 records
|
||||
expect(results.length).toBe(100)
|
||||
expect(results[0].name).toBe('User 1')
|
||||
expect(results[99].name).toBe('User 100')
|
||||
|
||||
// Verify we received the expected number of pages
|
||||
expect(page).toBe(Math.ceil(100 / chunkSize))
|
||||
})
|
||||
} else {
|
||||
it('should support LISTEN/NOTIFY for pub/sub messaging', async () => {
|
||||
// Create a promise that will resolve when the notification is received
|
||||
let receivedPayload = ''
|
||||
const notificationPromise = new Promise<void>((resolve) => {
|
||||
// Set up listener for the 'test_channel' notification
|
||||
sql.listen('test_channel', (data) => {
|
||||
receivedPayload = data
|
||||
resolve()
|
||||
})
|
||||
})
|
||||
|
||||
// Small delay to ensure listener is set up
|
||||
// await new Promise((resolve) => setTimeout(resolve, 100))
|
||||
|
||||
// Send a notification on the same connection
|
||||
await sql`NOTIFY test_channel, 'Hello from PGlite!'`
|
||||
|
||||
// Wait for the notification to be received
|
||||
await notificationPromise
|
||||
|
||||
// Verify the notification was received with the correct payload
|
||||
expect(receivedPayload).toBe('Hello from PGlite!')
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
describe('with extensions via CLI', () => {
|
||||
const UNIX_SOCKET_DIR_PATH = `/tmp/${Date.now().toString()}`
|
||||
fs.mkdirSync(UNIX_SOCKET_DIR_PATH)
|
||||
const UNIX_SOCKET_PATH = `${UNIX_SOCKET_DIR_PATH}/.s.PGSQL.5432`
|
||||
let serverProcess: ChildProcess | null = null
|
||||
let sql: ReturnType<typeof postgres>
|
||||
|
||||
beforeAll(async () => {
|
||||
// Start the server with extensions via CLI using tsx for dev or node for dist
|
||||
const serverScript = join(__dirname, '../src/scripts/server.ts')
|
||||
serverProcess = spawn(
|
||||
'npx',
|
||||
[
|
||||
'tsx',
|
||||
serverScript,
|
||||
'--path',
|
||||
UNIX_SOCKET_PATH,
|
||||
'--extensions',
|
||||
'vector,pg_uuidv7,@electric-sql/pglite/pg_hashids:pg_hashids',
|
||||
],
|
||||
{
|
||||
stdio: ['ignore', 'pipe', 'pipe'],
|
||||
},
|
||||
)
|
||||
|
||||
// Wait for server to be ready by checking for "listening" message
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
const timeout = setTimeout(() => {
|
||||
reject(new Error('Server startup timeout'))
|
||||
}, 30000)
|
||||
|
||||
const onData = (data: Buffer) => {
|
||||
const output = data.toString()
|
||||
if (output.includes('listening')) {
|
||||
clearTimeout(timeout)
|
||||
resolve()
|
||||
}
|
||||
}
|
||||
|
||||
serverProcess!.stdout?.on('data', onData)
|
||||
serverProcess!.stderr?.on('data', (data) => {
|
||||
console.error('Server stderr:', data.toString())
|
||||
})
|
||||
|
||||
serverProcess!.on('error', (err) => {
|
||||
clearTimeout(timeout)
|
||||
reject(err)
|
||||
})
|
||||
|
||||
serverProcess!.on('exit', (code) => {
|
||||
if (code !== 0 && code !== null) {
|
||||
clearTimeout(timeout)
|
||||
reject(new Error(`Server exited with code ${code}`))
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
console.log('Server with extensions started')
|
||||
|
||||
sql = postgres({
|
||||
path: UNIX_SOCKET_PATH,
|
||||
database: 'postgres',
|
||||
username: 'postgres',
|
||||
password: 'postgres',
|
||||
idle_timeout: 5,
|
||||
connect_timeout: 10,
|
||||
max: 1,
|
||||
})
|
||||
})
|
||||
|
||||
afterAll(async () => {
|
||||
if (sql) {
|
||||
await sql.end().catch(() => {})
|
||||
}
|
||||
|
||||
if (serverProcess) {
|
||||
serverProcess.kill('SIGTERM')
|
||||
await new Promise<void>((resolve) => {
|
||||
serverProcess!.on('exit', () => resolve())
|
||||
setTimeout(resolve, 2000) // Force resolve after 2s
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
it('should load and use vector extension', async () => {
|
||||
// Create the extension
|
||||
await sql`CREATE EXTENSION IF NOT EXISTS vector`
|
||||
|
||||
// Verify extension is loaded
|
||||
const extCheck = await sql`
|
||||
SELECT extname FROM pg_extension WHERE extname = 'vector'
|
||||
`
|
||||
expect(extCheck).toHaveLength(1)
|
||||
expect(extCheck[0].extname).toBe('vector')
|
||||
|
||||
// Create a table with vector column
|
||||
await sql`
|
||||
CREATE TABLE test_vectors (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name TEXT,
|
||||
vec vector(3)
|
||||
)
|
||||
`
|
||||
|
||||
// Insert test data
|
||||
await sql`
|
||||
INSERT INTO test_vectors (name, vec) VALUES
|
||||
('test1', '[1,2,3]'),
|
||||
('test2', '[4,5,6]'),
|
||||
('test3', '[7,8,9]')
|
||||
`
|
||||
|
||||
// Query with vector distance
|
||||
const result = await sql`
|
||||
SELECT name, vec, vec <-> '[3,1,2]' AS distance
|
||||
FROM test_vectors
|
||||
ORDER BY distance
|
||||
`
|
||||
|
||||
expect(result).toHaveLength(3)
|
||||
expect(result[0].name).toBe('test1')
|
||||
expect(result[0].vec).toBe('[1,2,3]')
|
||||
expect(parseFloat(result[0].distance)).toBeCloseTo(2.449, 2)
|
||||
})
|
||||
|
||||
it('should load and use pg_uuidv7 extension', async () => {
|
||||
// Create the extension
|
||||
await sql`CREATE EXTENSION IF NOT EXISTS pg_uuidv7`
|
||||
|
||||
// Verify extension is loaded
|
||||
const extCheck = await sql`
|
||||
SELECT extname FROM pg_extension WHERE extname = 'pg_uuidv7'
|
||||
`
|
||||
expect(extCheck).toHaveLength(1)
|
||||
expect(extCheck[0].extname).toBe('pg_uuidv7')
|
||||
|
||||
// Generate a UUIDv7
|
||||
const result = await sql`SELECT uuid_generate_v7() as uuid`
|
||||
expect(result[0].uuid).toHaveLength(36)
|
||||
|
||||
// Test uuid_v7_to_timestamptz function
|
||||
const tsResult = await sql`
|
||||
SELECT uuid_v7_to_timestamptz('018570bb-4a7d-7c7e-8df4-6d47afd8c8fc') as ts
|
||||
`
|
||||
const timestamp = new Date(tsResult[0].ts)
|
||||
expect(timestamp.toISOString()).toBe('2023-01-02T04:26:40.637Z')
|
||||
})
|
||||
|
||||
it('should load and use pg_hashids extension from npm package path', async () => {
|
||||
// Create the extension
|
||||
await sql`CREATE EXTENSION IF NOT EXISTS pg_hashids`
|
||||
|
||||
// Verify extension is loaded
|
||||
const extCheck = await sql`
|
||||
SELECT extname FROM pg_extension WHERE extname = 'pg_hashids'
|
||||
`
|
||||
expect(extCheck).toHaveLength(1)
|
||||
expect(extCheck[0].extname).toBe('pg_hashids')
|
||||
|
||||
// Test id_encode function
|
||||
const result = await sql`
|
||||
SELECT id_encode(1234567, 'salt', 10, 'abcdefghijABCDEFGHIJ1234567890') as hash
|
||||
`
|
||||
expect(result[0].hash).toBeTruthy()
|
||||
expect(typeof result[0].hash).toBe('string')
|
||||
|
||||
// Test id_decode function (round-trip)
|
||||
const hash = result[0].hash
|
||||
const decodeResult = await sql`
|
||||
SELECT id_decode(${hash}, 'salt', 10, 'abcdefghijABCDEFGHIJ1234567890') as id
|
||||
`
|
||||
expect(decodeResult[0].id[0]).toBe('1234567')
|
||||
})
|
||||
})
|
||||
})
|
||||
233
_node_modules/@electric-sql/pglite-socket/tests/server.test.ts
generated
Normal file
233
_node_modules/@electric-sql/pglite-socket/tests/server.test.ts
generated
Normal file
@@ -0,0 +1,233 @@
|
||||
import { describe, it, expect, afterEach } from 'vitest'
|
||||
import { spawn, ChildProcess } from 'node:child_process'
|
||||
import { createConnection } from 'net'
|
||||
import path from 'node:path'
|
||||
import { fileURLToPath } from 'node:url'
|
||||
|
||||
const __dirname = path.dirname(fileURLToPath(import.meta.url))
|
||||
const serverScript = path.resolve(__dirname, '../src/scripts/server.ts')
|
||||
|
||||
// Helper to wait for a port to be available
|
||||
async function waitForPort(port: number, timeout = 15000): Promise<boolean> {
|
||||
const start = Date.now()
|
||||
|
||||
while (Date.now() - start < timeout) {
|
||||
try {
|
||||
const socket = createConnection({ port, host: '127.0.0.1' })
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
socket.on('connect', () => {
|
||||
socket.end()
|
||||
resolve()
|
||||
})
|
||||
socket.on('error', reject)
|
||||
})
|
||||
return true
|
||||
} catch {
|
||||
await new Promise((resolve) => setTimeout(resolve, 100))
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
describe('Server Script Tests', () => {
|
||||
const TEST_PORT_BASE = 15500
|
||||
let currentTestPort = TEST_PORT_BASE
|
||||
|
||||
// Get a unique port for each test
|
||||
function getTestPort(): number {
|
||||
return ++currentTestPort
|
||||
}
|
||||
|
||||
describe('Help and Basic Functionality', () => {
|
||||
it('should show help when --help flag is used', async () => {
|
||||
const serverProcess = spawn('tsx', [serverScript, '--help'], {
|
||||
stdio: ['pipe', 'pipe', 'pipe'],
|
||||
})
|
||||
|
||||
let output = ''
|
||||
serverProcess.stdout?.on('data', (data) => {
|
||||
output += data.toString()
|
||||
})
|
||||
|
||||
serverProcess.stderr?.on('data', (data) => {
|
||||
console.error(data.toString())
|
||||
})
|
||||
|
||||
await new Promise<void>((resolve) => {
|
||||
serverProcess.on('exit', (code) => {
|
||||
expect(code).toBe(0)
|
||||
expect(output).toContain('PGlite Socket Server')
|
||||
expect(output).toContain('Usage:')
|
||||
expect(output).toContain('Options:')
|
||||
expect(output).toContain('--db')
|
||||
expect(output).toContain('--port')
|
||||
expect(output).toContain('--host')
|
||||
resolve()
|
||||
})
|
||||
})
|
||||
}, 10000)
|
||||
|
||||
it('should accept and use debug level parameter', async () => {
|
||||
const testPort = getTestPort()
|
||||
const serverProcess = spawn(
|
||||
'tsx',
|
||||
[serverScript, '--port', testPort.toString(), '--debug', '2'],
|
||||
{
|
||||
stdio: ['pipe', 'pipe', 'pipe'],
|
||||
},
|
||||
)
|
||||
|
||||
let output = ''
|
||||
serverProcess.stdout?.on('data', (data) => {
|
||||
output += data.toString()
|
||||
})
|
||||
|
||||
serverProcess.stderr?.on('data', (data) => {
|
||||
console.error(data.toString())
|
||||
})
|
||||
|
||||
// Wait for server to start
|
||||
await waitForPort(testPort)
|
||||
|
||||
// Kill the server
|
||||
serverProcess.kill('SIGTERM')
|
||||
|
||||
await new Promise<void>((resolve) => {
|
||||
serverProcess.on('exit', () => {
|
||||
expect(output).toContain('Debug level: 2')
|
||||
resolve()
|
||||
})
|
||||
})
|
||||
}, 10000)
|
||||
})
|
||||
|
||||
describe('Server Startup and Connectivity', () => {
|
||||
let serverProcess: ChildProcess | null = null
|
||||
|
||||
afterEach(async () => {
|
||||
if (serverProcess) {
|
||||
serverProcess.kill('SIGTERM')
|
||||
await new Promise<void>((resolve) => {
|
||||
if (serverProcess) {
|
||||
serverProcess.on('exit', () => resolve())
|
||||
} else {
|
||||
resolve()
|
||||
}
|
||||
})
|
||||
serverProcess = null
|
||||
}
|
||||
})
|
||||
|
||||
it('should start server on TCP port and accept connections', async () => {
|
||||
const testPort = getTestPort()
|
||||
|
||||
serverProcess = spawn(
|
||||
'tsx',
|
||||
[serverScript, '--port', testPort.toString()],
|
||||
{
|
||||
stdio: ['pipe', 'pipe', 'pipe'],
|
||||
},
|
||||
)
|
||||
|
||||
let output = ''
|
||||
serverProcess.stdout?.on('data', (data) => {
|
||||
output += data.toString()
|
||||
})
|
||||
|
||||
serverProcess.stderr?.on('data', (data) => {
|
||||
console.error(data.toString())
|
||||
})
|
||||
|
||||
// Wait for server to be ready
|
||||
const isReady = await waitForPort(testPort)
|
||||
expect(isReady).toBe(true)
|
||||
|
||||
// Check that we can connect
|
||||
const socket = createConnection({ port: testPort, host: '127.0.0.1' })
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
socket.on('connect', resolve)
|
||||
socket.on('error', reject)
|
||||
setTimeout(() => reject(new Error('Connection timeout')), 3000)
|
||||
})
|
||||
socket.end()
|
||||
|
||||
expect(output).toContain('PGlite database initialized')
|
||||
expect(output).toContain(`"port":${testPort}`)
|
||||
}, 10000)
|
||||
|
||||
it('should work with memory database', async () => {
|
||||
const testPort = getTestPort()
|
||||
|
||||
serverProcess = spawn(
|
||||
'tsx',
|
||||
[serverScript, '--port', testPort.toString(), '--db', 'memory://'],
|
||||
{
|
||||
stdio: ['pipe', 'pipe', 'pipe'],
|
||||
},
|
||||
)
|
||||
|
||||
let output = ''
|
||||
serverProcess.stdout?.on('data', (data) => {
|
||||
output += data.toString()
|
||||
})
|
||||
|
||||
serverProcess.stderr?.on('data', (data) => {
|
||||
console.error(data.toString())
|
||||
})
|
||||
|
||||
const isReady = await waitForPort(testPort)
|
||||
expect(isReady).toBe(true)
|
||||
expect(output).toContain('Initializing PGLite with database: memory://')
|
||||
}, 10000)
|
||||
})
|
||||
|
||||
describe('Configuration Options', () => {
|
||||
let serverProcess: ChildProcess | null = null
|
||||
|
||||
afterEach(async () => {
|
||||
if (serverProcess) {
|
||||
serverProcess.kill('SIGTERM')
|
||||
await new Promise<void>((resolve) => {
|
||||
if (serverProcess) {
|
||||
serverProcess.on('exit', () => resolve())
|
||||
} else {
|
||||
resolve()
|
||||
}
|
||||
})
|
||||
serverProcess = null
|
||||
}
|
||||
})
|
||||
|
||||
it('should handle different hosts', async () => {
|
||||
const testPort = getTestPort()
|
||||
|
||||
serverProcess = spawn(
|
||||
'tsx',
|
||||
[serverScript, '--port', testPort.toString(), '--host', '0.0.0.0'],
|
||||
{
|
||||
stdio: ['pipe', 'pipe', 'pipe'],
|
||||
},
|
||||
)
|
||||
|
||||
let output = ''
|
||||
serverProcess.stdout?.on('data', (data) => {
|
||||
output += data.toString()
|
||||
})
|
||||
|
||||
serverProcess.stderr?.on('data', (data) => {
|
||||
console.error(data.toString())
|
||||
})
|
||||
|
||||
const isReady = await waitForPort(testPort)
|
||||
expect(isReady).toBe(true)
|
||||
serverProcess.kill()
|
||||
await new Promise<void>((resolve) => {
|
||||
serverProcess.on('exit', () => {
|
||||
expect(output).toContain(`"host":"0.0.0.0"`)
|
||||
serverProcess = null
|
||||
resolve()
|
||||
})
|
||||
})
|
||||
}, 10000)
|
||||
})
|
||||
})
|
||||
10
_node_modules/@electric-sql/pglite-socket/tsconfig.json
generated
Normal file
10
_node_modules/@electric-sql/pglite-socket/tsconfig.json
generated
Normal file
@@ -0,0 +1,10 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
"types": [
|
||||
"@types/emscripten",
|
||||
"node"
|
||||
]
|
||||
},
|
||||
"include": ["src", "examples", "tsup.config.ts", "vitest.config.ts"]
|
||||
}
|
||||
20
_node_modules/@electric-sql/pglite-socket/tsup.config.ts
generated
Normal file
20
_node_modules/@electric-sql/pglite-socket/tsup.config.ts
generated
Normal file
@@ -0,0 +1,20 @@
|
||||
import { defineConfig } from 'tsup'
|
||||
|
||||
const entryPoints = ['src/index.ts', 'src/scripts/server.ts']
|
||||
|
||||
const minify = process.env.DEBUG === 'true' ? false : true
|
||||
|
||||
export default defineConfig([
|
||||
{
|
||||
entry: entryPoints,
|
||||
sourcemap: true,
|
||||
dts: {
|
||||
entry: entryPoints,
|
||||
resolve: true,
|
||||
},
|
||||
clean: true,
|
||||
minify: minify,
|
||||
shims: true,
|
||||
format: ['esm', 'cjs'],
|
||||
},
|
||||
])
|
||||
16
_node_modules/@electric-sql/pglite-socket/vitest.config.ts
generated
Normal file
16
_node_modules/@electric-sql/pglite-socket/vitest.config.ts
generated
Normal file
@@ -0,0 +1,16 @@
|
||||
import { defineConfig } from 'vitest/config'
|
||||
|
||||
export default defineConfig({
|
||||
test: {
|
||||
name: 'integration tests',
|
||||
globals: true,
|
||||
typecheck: { enabled: true },
|
||||
environment: 'node',
|
||||
testTimeout: 30000,
|
||||
watch: false,
|
||||
dir: './tests',
|
||||
maxWorkers: 1,
|
||||
fileParallelism: false,
|
||||
maxConcurrency: 1 // because we are running a TCP server on a port
|
||||
},
|
||||
})
|
||||
165
_node_modules/@electric-sql/pglite-tools/CHANGELOG.md
generated
Normal file
165
_node_modules/@electric-sql/pglite-tools/CHANGELOG.md
generated
Normal file
@@ -0,0 +1,165 @@
|
||||
# @electric-sql/pglite-tools
|
||||
|
||||
## 0.2.20
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [45bff97]
|
||||
- Updated dependencies [5ec474f]
|
||||
- @electric-sql/pglite@0.3.15
|
||||
|
||||
## 0.2.19
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [8785034]
|
||||
- Updated dependencies [90cfee8]
|
||||
- @electric-sql/pglite@0.3.14
|
||||
|
||||
## 0.2.18
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- ad3d0d8: Updated pg_dump to use callback data exchange; built pg_dump with emscripten
|
||||
- Updated dependencies [ad3d0d8]
|
||||
- @electric-sql/pglite@0.3.13
|
||||
|
||||
## 0.2.17
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [ce0e74e]
|
||||
- @electric-sql/pglite@0.3.12
|
||||
|
||||
## 0.2.16
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [9a104b9]
|
||||
- @electric-sql/pglite@0.3.11
|
||||
|
||||
## 0.2.15
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [ad765ed]
|
||||
- @electric-sql/pglite@0.3.10
|
||||
|
||||
## 0.2.14
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- e40ccad: Upgrade emsdk
|
||||
- Updated dependencies [e40ccad]
|
||||
- @electric-sql/pglite@0.3.9
|
||||
|
||||
## 0.2.13
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- be677b4: fix pg_dump on Windows systems
|
||||
|
||||
When calling **pg_dump** on Windows system the function fails with an error as the one bellow.
|
||||
❗ Notice the double drive letter
|
||||
`Error: ENOENT: no such file or directory, open 'E:\C:\Users\<USERNAME>\AppData\Local\npm-cache\_npx\ba4f1959e38407b5\node_modules\@electric-sql\pglite-tools\dist\pg_dump.wasm'`
|
||||
|
||||
The problem is in execPgDump function at line
|
||||
`const blob = await fs.readFile(bin.toString().slice(7))`
|
||||
I think the intention here was to remove `file://` from the begging of the path. However this is not necesarry readFile can handle URL objects.
|
||||
Moreover this will fail on Windows becase the slice creates a path like '/C:/<USERNAME>...' and the readFile function will add the extra drive letter
|
||||
|
||||
- Updated dependencies [f12a582]
|
||||
- Updated dependencies [bd263aa]
|
||||
- @electric-sql/pglite@0.3.8
|
||||
|
||||
## 0.2.12
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [0936962]
|
||||
- @electric-sql/pglite@0.3.7
|
||||
|
||||
## 0.2.11
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [6898469]
|
||||
- Updated dependencies [469be18]
|
||||
- Updated dependencies [64e33c7]
|
||||
- @electric-sql/pglite@0.3.6
|
||||
|
||||
## 0.2.10
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- 8172b72: new pg_dump wasm blob
|
||||
- Updated dependencies [6653899]
|
||||
- Updated dependencies [5f007fc]
|
||||
- @electric-sql/pglite@0.3.5
|
||||
|
||||
## 0.2.9
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- 38a55d0: fix cjs/esm misconfigurations
|
||||
- Updated dependencies [1fcaa3e]
|
||||
- Updated dependencies [38a55d0]
|
||||
- Updated dependencies [aac7003]
|
||||
- Updated dependencies [8ca254d]
|
||||
- @electric-sql/pglite@0.3.4
|
||||
|
||||
## 0.2.8
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [ea2c7c7]
|
||||
- @electric-sql/pglite@0.3.3
|
||||
|
||||
## 0.2.7
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [e2c654b]
|
||||
- @electric-sql/pglite@0.3.2
|
||||
|
||||
## 0.2.6
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [713364e]
|
||||
- @electric-sql/pglite@0.3.1
|
||||
|
||||
## 0.2.5
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- 317fd36: Specify a peer dependency range on @electric-sql/pglite
|
||||
- Updated dependencies [97e52f7]
|
||||
- Updated dependencies [4356024]
|
||||
- Updated dependencies [0033bc7]
|
||||
- @electric-sql/pglite@0.3.0
|
||||
|
||||
## 0.2.4
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- bbfa9f1: Restore SEARCH_PATH after pg_dump
|
||||
|
||||
## 0.2.3
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- 8545760: pg_dump error messages set on the thrown Error
|
||||
- d26e658: Run a DEALLOCATE ALL after each pg_dump to cleanup the prepared statements.
|
||||
|
||||
## 0.2.2
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- 17c9875: add node imports to the package.json browser excludes
|
||||
|
||||
## 0.2.1
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- 6547374: Alpha version of pg_dump support in the browser and Node using a WASM build of pg_dump
|
||||
176
_node_modules/@electric-sql/pglite-tools/LICENSE
generated
Normal file
176
_node_modules/@electric-sql/pglite-tools/LICENSE
generated
Normal file
@@ -0,0 +1,176 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
72
_node_modules/@electric-sql/pglite-tools/README.md
generated
Normal file
72
_node_modules/@electric-sql/pglite-tools/README.md
generated
Normal file
@@ -0,0 +1,72 @@
|
||||
# pglite-tools
|
||||
|
||||
A selection of tools for working with [PGlite](https://github.com/electric-sql/pglite) databases, including pg_dump.
|
||||
|
||||
Install with:
|
||||
|
||||
```bash
|
||||
npm install @electric-sql/pglite-tools
|
||||
```
|
||||
|
||||
## `pgDump`
|
||||
|
||||
pg_dump is a tool for dumping a PGlite database to a SQL file, this is a WASM build of pg_dump that can be used in a browser or other JavaScript environments. You can read more about pg_dump [in the Postgres docs](https://www.postgresql.org/docs/current/app-pgdump.html).
|
||||
|
||||
Note: pg_dump will execute `DEALLOCATE ALL;` after each dump. Since this is running on the same (single) connection, any prepared statements that you have made before running pg_dump will be affected.
|
||||
|
||||
### Options
|
||||
|
||||
- `pg`: A PGlite instance.
|
||||
- `args`: An array of arguments to pass to pg_dump - see [pg_dump docs](https://www.postgresql.org/docs/current/app-pgdump.html) for more details.
|
||||
- `fileName`: The name of the file to write the dump to, defaults to `dump.sql`.
|
||||
|
||||
There are a number of arguments that are automatically added to the end of the command, these are:
|
||||
|
||||
- `--inserts` - use inserts format for the output, this ensures that the dump can be restored by simply passing the output to `pg.exec()`.
|
||||
- `-j 1` - concurrency level, set to 1 as multithreading isn't supported.
|
||||
- `-f /tmp/out.sql` - the output file is always written to `/tmp/out.sql` in the virtual file system.
|
||||
- `-U postgres` - use the postgres user is hard coded.
|
||||
|
||||
### Returns
|
||||
|
||||
- A `File` object containing the dump.
|
||||
|
||||
### Caveats
|
||||
|
||||
- After restoring a dump, you might want to set the same search path as the initial db.
|
||||
|
||||
### Example
|
||||
|
||||
```typescript
|
||||
import { PGlite } from '@electric-sql/pglite'
|
||||
import { pgDump } from '@electric-sql/pglite-tools/pg_dump'
|
||||
|
||||
const pg = await PGlite.create()
|
||||
|
||||
// Create a table and insert some data
|
||||
await pg.exec(`
|
||||
CREATE TABLE test (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name TEXT
|
||||
);
|
||||
`)
|
||||
await pg.exec(`
|
||||
INSERT INTO test (name) VALUES ('test');
|
||||
`)
|
||||
|
||||
// store the current search path so it can be used in the restored db
|
||||
const initialSearchPath = (await pg1.query<{ search_path: string }>('SHOW SEARCH_PATH;')).rows[0].search_path
|
||||
|
||||
// Dump the database to a file
|
||||
const dump = await pgDump({ pg })
|
||||
// Get the dump text - used for restore
|
||||
const dumpContent = await dump.text()
|
||||
|
||||
// Create a new database
|
||||
const restoredPG = await PGlite.create()
|
||||
// ... and restore it using the dump
|
||||
await restoredPG.exec(dumpContent)
|
||||
|
||||
// optional - after importing, set search path back to the initial one
|
||||
await restoredPG.exec(`SET search_path TO ${initialSearchPath};`);
|
||||
```
|
||||
4
_node_modules/@electric-sql/pglite-tools/dist/chunk-WAOUATYI.js
generated
vendored
Normal file
4
_node_modules/@electric-sql/pglite-tools/dist/chunk-WAOUATYI.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
_node_modules/@electric-sql/pglite-tools/dist/chunk-WAOUATYI.js.map
generated
vendored
Normal file
1
_node_modules/@electric-sql/pglite-tools/dist/chunk-WAOUATYI.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
4
_node_modules/@electric-sql/pglite-tools/dist/index.cjs
generated
vendored
Normal file
4
_node_modules/@electric-sql/pglite-tools/dist/index.cjs
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
_node_modules/@electric-sql/pglite-tools/dist/index.cjs.map
generated
vendored
Normal file
1
_node_modules/@electric-sql/pglite-tools/dist/index.cjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
2
_node_modules/@electric-sql/pglite-tools/dist/index.d.cts
generated
vendored
Normal file
2
_node_modules/@electric-sql/pglite-tools/dist/index.d.cts
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
export { pgDump } from './pg_dump.cjs';
|
||||
import '@electric-sql/pglite';
|
||||
2
_node_modules/@electric-sql/pglite-tools/dist/index.d.ts
generated
vendored
Normal file
2
_node_modules/@electric-sql/pglite-tools/dist/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
export { pgDump } from './pg_dump.js';
|
||||
import '@electric-sql/pglite';
|
||||
2
_node_modules/@electric-sql/pglite-tools/dist/index.js
generated
vendored
Normal file
2
_node_modules/@electric-sql/pglite-tools/dist/index.js
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
import{a as o}from"./chunk-WAOUATYI.js";export{o as pgDump};
|
||||
//# sourceMappingURL=index.js.map
|
||||
1
_node_modules/@electric-sql/pglite-tools/dist/index.js.map
generated
vendored
Normal file
1
_node_modules/@electric-sql/pglite-tools/dist/index.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":[],"sourcesContent":[],"mappings":"","names":[]}
|
||||
4
_node_modules/@electric-sql/pglite-tools/dist/pg_dump.cjs
generated
vendored
Normal file
4
_node_modules/@electric-sql/pglite-tools/dist/pg_dump.cjs
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
_node_modules/@electric-sql/pglite-tools/dist/pg_dump.cjs.map
generated
vendored
Normal file
1
_node_modules/@electric-sql/pglite-tools/dist/pg_dump.cjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
14
_node_modules/@electric-sql/pglite-tools/dist/pg_dump.d.cts
generated
vendored
Normal file
14
_node_modules/@electric-sql/pglite-tools/dist/pg_dump.d.cts
generated
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
import { PGlite } from '@electric-sql/pglite';
|
||||
|
||||
interface PgDumpOptions {
|
||||
pg: PGlite;
|
||||
args?: string[];
|
||||
fileName?: string;
|
||||
verbose?: boolean;
|
||||
}
|
||||
/**
|
||||
* Execute pg_dump
|
||||
*/
|
||||
declare function pgDump({ pg, args, fileName, }: PgDumpOptions): Promise<File>;
|
||||
|
||||
export { pgDump };
|
||||
14
_node_modules/@electric-sql/pglite-tools/dist/pg_dump.d.ts
generated
vendored
Normal file
14
_node_modules/@electric-sql/pglite-tools/dist/pg_dump.d.ts
generated
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
import { PGlite } from '@electric-sql/pglite';
|
||||
|
||||
interface PgDumpOptions {
|
||||
pg: PGlite;
|
||||
args?: string[];
|
||||
fileName?: string;
|
||||
verbose?: boolean;
|
||||
}
|
||||
/**
|
||||
* Execute pg_dump
|
||||
*/
|
||||
declare function pgDump({ pg, args, fileName, }: PgDumpOptions): Promise<File>;
|
||||
|
||||
export { pgDump };
|
||||
2
_node_modules/@electric-sql/pglite-tools/dist/pg_dump.js
generated
vendored
Normal file
2
_node_modules/@electric-sql/pglite-tools/dist/pg_dump.js
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
import{a}from"./chunk-WAOUATYI.js";export{a as pgDump};
|
||||
//# sourceMappingURL=pg_dump.js.map
|
||||
1
_node_modules/@electric-sql/pglite-tools/dist/pg_dump.js.map
generated
vendored
Normal file
1
_node_modules/@electric-sql/pglite-tools/dist/pg_dump.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":[],"sourcesContent":[],"mappings":"","names":[]}
|
||||
BIN
_node_modules/@electric-sql/pglite-tools/dist/pg_dump.wasm
generated
vendored
Normal file
BIN
_node_modules/@electric-sql/pglite-tools/dist/pg_dump.wasm
generated
vendored
Normal file
Binary file not shown.
29
_node_modules/@electric-sql/pglite-tools/eslint.config.js
generated
Normal file
29
_node_modules/@electric-sql/pglite-tools/eslint.config.js
generated
Normal file
@@ -0,0 +1,29 @@
|
||||
import globals from 'globals'
|
||||
import rootConfig from '../../eslint.config.js'
|
||||
|
||||
export default [
|
||||
...rootConfig,
|
||||
{
|
||||
ignores: ['release/**/*', 'examples/**/*', 'dist/**/*'],
|
||||
},
|
||||
{
|
||||
languageOptions: {
|
||||
globals: {
|
||||
...globals.browser,
|
||||
...globals.node,
|
||||
},
|
||||
},
|
||||
rules: {
|
||||
...rootConfig.rules,
|
||||
'@typescript-eslint/no-explicit-any': 'off',
|
||||
},
|
||||
},
|
||||
{
|
||||
files: ['tests/targets/deno/**/*.js'],
|
||||
languageOptions: {
|
||||
globals: {
|
||||
Deno: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
]
|
||||
76
_node_modules/@electric-sql/pglite-tools/package.json
generated
Normal file
76
_node_modules/@electric-sql/pglite-tools/package.json
generated
Normal file
@@ -0,0 +1,76 @@
|
||||
{
|
||||
"name": "@electric-sql/pglite-tools",
|
||||
"version": "0.2.20",
|
||||
"description": "Tools for working with PGlite databases",
|
||||
"author": "Electric DB Limited",
|
||||
"homepage": "https://pglite.dev",
|
||||
"license": "Apache-2.0",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/electric-sql/pglite",
|
||||
"directory": "packages/pglite-tools"
|
||||
},
|
||||
"keywords": [
|
||||
"postgres",
|
||||
"sql",
|
||||
"database",
|
||||
"wasm",
|
||||
"pglite",
|
||||
"pg_dump",
|
||||
"pg_restore"
|
||||
],
|
||||
"private": false,
|
||||
"publishConfig": {
|
||||
"access": "public"
|
||||
},
|
||||
"type": "module",
|
||||
"main": "./dist/index.cjs",
|
||||
"module": "./dist/index.js",
|
||||
"types": "./dist/index.d.ts",
|
||||
"exports": {
|
||||
".": {
|
||||
"import": {
|
||||
"types": "./dist/index.d.ts",
|
||||
"default": "./dist/index.js"
|
||||
},
|
||||
"require": {
|
||||
"types": "./dist/index.d.cts",
|
||||
"default": "./dist/index.cjs"
|
||||
}
|
||||
},
|
||||
"./pg_dump": {
|
||||
"import": {
|
||||
"types": "./dist/pg_dump.d.ts",
|
||||
"default": "./dist/pg_dump.js"
|
||||
},
|
||||
"require": {
|
||||
"types": "./dist/pg_dump.d.cts",
|
||||
"default": "./dist/pg_dump.cjs"
|
||||
}
|
||||
}
|
||||
},
|
||||
"browser": {
|
||||
"fs": false,
|
||||
"fs/promises": false
|
||||
},
|
||||
"devDependencies": {
|
||||
"@arethetypeswrong/cli": "^0.18.1",
|
||||
"@types/emscripten": "^1.41.1",
|
||||
"@types/node": "^20.16.11",
|
||||
"tsx": "^4.19.2",
|
||||
"vitest": "^1.3.1",
|
||||
"@electric-sql/pglite": "0.3.15"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@electric-sql/pglite": "0.3.15"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "tsup",
|
||||
"check:exports": "attw . --pack --profile node16",
|
||||
"lint": "eslint ./src ./tests --report-unused-disable-directives --max-warnings 0",
|
||||
"format": "prettier --write ./src ./tests",
|
||||
"typecheck": "tsc",
|
||||
"stylecheck": "pnpm lint && prettier --check ./src ./tests",
|
||||
"test": "vitest"
|
||||
}
|
||||
}
|
||||
1
_node_modules/@electric-sql/pglite-tools/src/index.ts
generated
Normal file
1
_node_modules/@electric-sql/pglite-tools/src/index.ts
generated
Normal file
@@ -0,0 +1 @@
|
||||
export * from './pg_dump'
|
||||
41
_node_modules/@electric-sql/pglite-tools/src/pgDumpModFactory.ts
generated
Normal file
41
_node_modules/@electric-sql/pglite-tools/src/pgDumpModFactory.ts
generated
Normal file
@@ -0,0 +1,41 @@
|
||||
import PgDumpModFactory from '../release/pg_dump'
|
||||
|
||||
type IDBFS = Emscripten.FileSystemType & {
|
||||
quit: () => void
|
||||
dbs: Record<string, IDBDatabase>
|
||||
}
|
||||
|
||||
export type FS = typeof FS & {
|
||||
filesystems: {
|
||||
MEMFS: Emscripten.FileSystemType
|
||||
NODEFS: Emscripten.FileSystemType
|
||||
IDBFS: IDBFS
|
||||
}
|
||||
quit: () => void
|
||||
}
|
||||
|
||||
export interface PgDumpMod
|
||||
extends Omit<EmscriptenModule, 'preInit' | 'preRun' | 'postRun'> {
|
||||
preInit: Array<{ (mod: PgDumpMod): void }>
|
||||
preRun: Array<{ (mod: PgDumpMod): void }>
|
||||
postRun: Array<{ (mod: PgDumpMod): void }>
|
||||
FS: FS
|
||||
WASM_PREFIX: string
|
||||
INITIAL_MEMORY: number
|
||||
_set_read_write_cbs: (read_cb: number, write_cb: number) => void
|
||||
addFunction: (
|
||||
cb: (ptr: any, length: number) => void,
|
||||
signature: string,
|
||||
) => number
|
||||
removeFunction: (f: number) => void
|
||||
_main: (args: string[]) => number
|
||||
onExit: (status: number) => void
|
||||
print: (test: string) => void
|
||||
printErr: (text: string) => void
|
||||
}
|
||||
|
||||
type PgDumpFactory<T extends PgDumpMod = PgDumpMod> = (
|
||||
moduleOverrides?: Partial<T>,
|
||||
) => Promise<T>
|
||||
|
||||
export default PgDumpModFactory as PgDumpFactory<PgDumpMod>
|
||||
157
_node_modules/@electric-sql/pglite-tools/src/pg_dump.ts
generated
Normal file
157
_node_modules/@electric-sql/pglite-tools/src/pg_dump.ts
generated
Normal file
@@ -0,0 +1,157 @@
|
||||
import { PGlite } from '@electric-sql/pglite'
|
||||
import PgDumpModFactory, { PgDumpMod } from './pgDumpModFactory'
|
||||
|
||||
const dumpFilePath = '/tmp/out.sql'
|
||||
|
||||
/**
|
||||
* Creates a new Uint8Array based on two different ArrayBuffers
|
||||
*
|
||||
* @private
|
||||
* @param {ArrayBuffers} buffer1 The first buffer.
|
||||
* @param {ArrayBuffers} buffer2 The second buffer.
|
||||
* @return {ArrayBuffers} The new ArrayBuffer created out of the two.
|
||||
*/
|
||||
function concat(buffer1: ArrayBuffer, buffer2: ArrayBuffer) {
|
||||
const tmp = new Uint8Array(buffer1.byteLength + buffer2.byteLength)
|
||||
tmp.set(new Uint8Array(buffer1), 0)
|
||||
tmp.set(new Uint8Array(buffer2), buffer1.byteLength)
|
||||
return tmp
|
||||
}
|
||||
|
||||
interface ExecResult {
|
||||
exitCode: number
|
||||
fileContents: string
|
||||
stderr: string
|
||||
stdout: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Inner function to execute pg_dump
|
||||
*/
|
||||
async function execPgDump({
|
||||
pg,
|
||||
args,
|
||||
}: {
|
||||
pg: PGlite
|
||||
args: string[]
|
||||
}): Promise<ExecResult> {
|
||||
let pgdump_write, pgdump_read
|
||||
let exitStatus = 0
|
||||
let stderrOutput: string = ''
|
||||
let stdoutOutput: string = ''
|
||||
const emscriptenOpts: Partial<PgDumpMod> = {
|
||||
arguments: args,
|
||||
noExitRuntime: false,
|
||||
print: (text) => {
|
||||
stdoutOutput += text
|
||||
},
|
||||
printErr: (text) => {
|
||||
stderrOutput += text
|
||||
},
|
||||
onExit: (status: number) => {
|
||||
exitStatus = status
|
||||
},
|
||||
preRun: [
|
||||
(mod: PgDumpMod) => {
|
||||
mod.onRuntimeInitialized = () => {
|
||||
let bufferedBytes: Uint8Array = new Uint8Array()
|
||||
|
||||
pgdump_write = mod.addFunction((ptr: any, length: number) => {
|
||||
let bytes
|
||||
try {
|
||||
bytes = mod.HEAPU8.subarray(ptr, ptr + length)
|
||||
} catch (e: any) {
|
||||
console.error('error', e)
|
||||
throw e
|
||||
}
|
||||
const currentResponse = pg.execProtocolRawSync(bytes)
|
||||
bufferedBytes = concat(bufferedBytes, currentResponse)
|
||||
return length
|
||||
}, 'iii')
|
||||
|
||||
pgdump_read = mod.addFunction((ptr: any, max_length: number) => {
|
||||
let length = bufferedBytes.length
|
||||
if (length > max_length) {
|
||||
length = max_length
|
||||
}
|
||||
try {
|
||||
mod.HEAP8.set(bufferedBytes.subarray(0, length), ptr)
|
||||
} catch (e) {
|
||||
console.error(e)
|
||||
}
|
||||
bufferedBytes = bufferedBytes.subarray(length, bufferedBytes.length)
|
||||
return length
|
||||
}, 'iii')
|
||||
|
||||
mod._set_read_write_cbs(pgdump_read, pgdump_write)
|
||||
// default $HOME in emscripten is /home/web_user
|
||||
mod.FS.chmod('/home/web_user/.pgpass', 0o0600) // https://www.postgresql.org/docs/current/libpq-pgpass.html
|
||||
}
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
const mod = await PgDumpModFactory(emscriptenOpts)
|
||||
let fileContents = ''
|
||||
if (!exitStatus) {
|
||||
fileContents = mod.FS.readFile(dumpFilePath, { encoding: 'utf8' })
|
||||
}
|
||||
|
||||
return {
|
||||
exitCode: exitStatus,
|
||||
fileContents,
|
||||
stderr: stderrOutput,
|
||||
stdout: stdoutOutput,
|
||||
}
|
||||
}
|
||||
|
||||
interface PgDumpOptions {
|
||||
pg: PGlite
|
||||
args?: string[]
|
||||
fileName?: string
|
||||
verbose?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute pg_dump
|
||||
*/
|
||||
export async function pgDump({
|
||||
pg,
|
||||
args,
|
||||
fileName = 'dump.sql',
|
||||
}: PgDumpOptions) {
|
||||
const getSearchPath = await pg.query<{ search_path: string }>(
|
||||
'SHOW SEARCH_PATH;',
|
||||
)
|
||||
const search_path = getSearchPath.rows[0].search_path
|
||||
|
||||
const baseArgs = [
|
||||
'-U',
|
||||
'postgres',
|
||||
'--inserts',
|
||||
'-j',
|
||||
'1',
|
||||
'-f',
|
||||
dumpFilePath,
|
||||
'postgres',
|
||||
]
|
||||
|
||||
const execResult = await execPgDump({
|
||||
pg,
|
||||
args: [...(args ?? []), ...baseArgs],
|
||||
})
|
||||
|
||||
pg.exec(`DEALLOCATE ALL; SET SEARCH_PATH = ${search_path}`)
|
||||
|
||||
if (execResult.exitCode !== 0) {
|
||||
throw new Error(
|
||||
`pg_dump failed with exit code ${execResult.exitCode}. \nError message: ${execResult.stderr}`,
|
||||
)
|
||||
}
|
||||
|
||||
const file = new File([execResult.fileContents], fileName, {
|
||||
type: 'text/plain',
|
||||
})
|
||||
|
||||
return file
|
||||
}
|
||||
201
_node_modules/@electric-sql/pglite-tools/tests/pg_dump.test.ts
generated
Normal file
201
_node_modules/@electric-sql/pglite-tools/tests/pg_dump.test.ts
generated
Normal file
@@ -0,0 +1,201 @@
|
||||
import { describe, it, expect } from 'vitest'
|
||||
import { PGlite } from '@electric-sql/pglite'
|
||||
import { pgDump } from '../dist/pg_dump.js'
|
||||
import * as fs from 'fs/promises'
|
||||
|
||||
describe('pgDump', () => {
|
||||
it('should dump an empty database', async () => {
|
||||
const pg = await PGlite.create()
|
||||
const dump = await pgDump({ pg })
|
||||
|
||||
expect(dump).toBeInstanceOf(File)
|
||||
expect(dump.name).toBe('dump.sql')
|
||||
|
||||
const content = await dump.text()
|
||||
expect(content).toContain('PostgreSQL database dump')
|
||||
})
|
||||
|
||||
it('should dump an empty database multiple times', async () => {
|
||||
const pg = await PGlite.create()
|
||||
|
||||
for (let i = 0; i < 5; i++) {
|
||||
const fileName = `dump_${i}.sql`
|
||||
const dump = await pgDump({ pg, fileName })
|
||||
|
||||
expect(dump).toBeInstanceOf(File)
|
||||
expect(dump.name).toBe(fileName)
|
||||
|
||||
const content = await dump.text()
|
||||
expect(content).toContain('PostgreSQL database dump')
|
||||
}
|
||||
})
|
||||
|
||||
it('should dump a database with tables and data', async () => {
|
||||
const pg = await PGlite.create()
|
||||
|
||||
// Create test tables and insert data
|
||||
await pg.exec(`
|
||||
CREATE TABLE test1 (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name TEXT
|
||||
);
|
||||
INSERT INTO test1 (name) VALUES ('test1-row1');
|
||||
|
||||
CREATE TABLE test2 (
|
||||
id SERIAL PRIMARY KEY,
|
||||
value INTEGER
|
||||
);
|
||||
INSERT INTO test2 (value) VALUES (42);
|
||||
`)
|
||||
|
||||
const dump = await pgDump({ pg })
|
||||
const content = await dump.text()
|
||||
|
||||
// Check for table creation
|
||||
expect(content).toContain('CREATE TABLE public.test1')
|
||||
expect(content).toContain('CREATE TABLE public.test2')
|
||||
|
||||
// Check for data inserts
|
||||
expect(content).toContain('INSERT INTO public.test1')
|
||||
expect(content).toContain("'test1-row1'")
|
||||
expect(content).toContain('INSERT INTO public.test2')
|
||||
expect(content).toContain('42')
|
||||
})
|
||||
|
||||
it('should respect custom filename', async () => {
|
||||
const pg = await PGlite.create()
|
||||
const dump = await pgDump({ pg, fileName: 'custom.sql' })
|
||||
|
||||
expect(dump.name).toBe('custom.sql')
|
||||
})
|
||||
|
||||
it('should handle custom pg_dump arguments', async () => {
|
||||
const pg = await PGlite.create()
|
||||
await pg.exec(`
|
||||
CREATE TABLE test (id SERIAL PRIMARY KEY, name TEXT);
|
||||
INSERT INTO test (name) VALUES ('row1');
|
||||
`)
|
||||
|
||||
// Use --schema-only to exclude data
|
||||
const dump = await pgDump({ pg, args: ['--schema-only'] })
|
||||
const content = await dump.text()
|
||||
|
||||
expect(content).toContain('CREATE TABLE public.test')
|
||||
expect(content).not.toContain('INSERT INTO public.test')
|
||||
})
|
||||
|
||||
it('should be able to restore dumped database', async () => {
|
||||
const pg1 = await PGlite.create()
|
||||
|
||||
// Create original database
|
||||
await pg1.exec(`
|
||||
CREATE TABLE test (id SERIAL PRIMARY KEY, name TEXT);
|
||||
INSERT INTO test (name) VALUES ('row1'), ('row2');
|
||||
`)
|
||||
|
||||
const initialSearchPath = (
|
||||
await pg1.query<{ search_path: string }>('SHOW SEARCH_PATH;')
|
||||
).rows[0].search_path
|
||||
|
||||
// Dump database
|
||||
const dump = await pgDump({ pg: pg1 })
|
||||
const dumpContent = await dump.text()
|
||||
|
||||
// Create new database and restore
|
||||
const pg2 = await PGlite.create()
|
||||
await pg2.exec(dumpContent)
|
||||
|
||||
// after importing, set search path back to the initial one
|
||||
await pg2.exec(`SET search_path TO ${initialSearchPath};`)
|
||||
|
||||
// Verify data
|
||||
const result = await pg2.query<{ name: string }>(
|
||||
'SELECT * FROM test ORDER BY id',
|
||||
)
|
||||
expect(result.rows).toHaveLength(2)
|
||||
expect(result.rows[0].name).toBe('row1')
|
||||
expect(result.rows[1].name).toBe('row2')
|
||||
})
|
||||
|
||||
it('pg_dump should not change SEARCH_PATH', async () => {
|
||||
const pg = await PGlite.create()
|
||||
|
||||
await pg.exec(`SET SEARCH_PATH = amigo;`)
|
||||
const initialSearchPath = await pg.query('SHOW SEARCH_PATH;')
|
||||
|
||||
const dump = await pgDump({ pg })
|
||||
await dump.text()
|
||||
|
||||
const finalSearchPath = await pg.query('SHOW SEARCH_PATH;')
|
||||
|
||||
expect(initialSearchPath).toEqual(finalSearchPath)
|
||||
})
|
||||
|
||||
it('specify datadir: should dump a database with tables and data', async () => {
|
||||
const dataDir = '/tmp/pg_dump_pglite_data_dir'
|
||||
await fs.rm(dataDir, { force: true, recursive: true })
|
||||
const pg = await PGlite.create({
|
||||
dataDir: dataDir,
|
||||
})
|
||||
|
||||
// Create test tables and insert data
|
||||
await pg.exec(`
|
||||
CREATE TABLE test1 (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name TEXT
|
||||
);
|
||||
INSERT INTO test1 (name) VALUES ('test1-row1');
|
||||
|
||||
CREATE TABLE test2 (
|
||||
id SERIAL PRIMARY KEY,
|
||||
value INTEGER
|
||||
);
|
||||
INSERT INTO test2 (value) VALUES (42);
|
||||
`)
|
||||
|
||||
const dump = await pgDump({ pg })
|
||||
const content = await dump.text()
|
||||
|
||||
// Check for table creation
|
||||
expect(content).toContain('CREATE TABLE public.test1')
|
||||
expect(content).toContain('CREATE TABLE public.test2')
|
||||
|
||||
// Check for data inserts
|
||||
expect(content).toContain('INSERT INTO public.test1')
|
||||
expect(content).toContain("'test1-row1'")
|
||||
expect(content).toContain('INSERT INTO public.test2')
|
||||
expect(content).toContain('42')
|
||||
})
|
||||
|
||||
it('param --quote-all-identifiers should work', async () => {
|
||||
const pg = await PGlite.create()
|
||||
|
||||
// Create test tables and insert data
|
||||
await pg.exec(`
|
||||
CREATE TABLE test1 (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name TEXT
|
||||
);
|
||||
INSERT INTO test1 (name) VALUES ('test1-row1');
|
||||
|
||||
CREATE TABLE test2 (
|
||||
id SERIAL PRIMARY KEY,
|
||||
value INTEGER
|
||||
);
|
||||
INSERT INTO test2 (value) VALUES (42);
|
||||
`)
|
||||
|
||||
const dump = await pgDump({ pg, args: ['--quote-all-identifiers'] })
|
||||
const content = await dump.text()
|
||||
|
||||
// Check for table creation
|
||||
expect(content).toContain('CREATE TABLE "public"."test1"')
|
||||
expect(content).toContain('CREATE TABLE "public"."test2"')
|
||||
|
||||
// Check for data inserts
|
||||
expect(content).toContain('INSERT INTO "public"."test1"')
|
||||
expect(content).toContain("'test1-row1'")
|
||||
expect(content).toContain('INSERT INTO "public"."test2"')
|
||||
expect(content).toContain('42')
|
||||
})
|
||||
})
|
||||
15
_node_modules/@electric-sql/pglite-tools/tests/setup.ts
generated
Normal file
15
_node_modules/@electric-sql/pglite-tools/tests/setup.ts
generated
Normal file
@@ -0,0 +1,15 @@
|
||||
import { beforeAll } from 'vitest'
|
||||
import { execSync } from 'child_process'
|
||||
import { existsSync } from 'fs'
|
||||
import { join } from 'path'
|
||||
|
||||
beforeAll(() => {
|
||||
// Check if we need to build
|
||||
const distPath = join(__dirname, '../dist')
|
||||
const wasmPath = join(distPath, 'pg_dump.wasm')
|
||||
|
||||
if (!existsSync(wasmPath)) {
|
||||
console.log('Building project before running tests...')
|
||||
execSync('pnpm build', { stdio: 'inherit' })
|
||||
}
|
||||
})
|
||||
10
_node_modules/@electric-sql/pglite-tools/tsconfig.json
generated
Normal file
10
_node_modules/@electric-sql/pglite-tools/tsconfig.json
generated
Normal file
@@ -0,0 +1,10 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
"types": [
|
||||
"@types/emscripten",
|
||||
"node"
|
||||
]
|
||||
},
|
||||
"include": ["src", "tsup.config.ts", "vitest.config.ts"]
|
||||
}
|
||||
28
_node_modules/@electric-sql/pglite-tools/tsup.config.ts
generated
Normal file
28
_node_modules/@electric-sql/pglite-tools/tsup.config.ts
generated
Normal file
@@ -0,0 +1,28 @@
|
||||
import { cpSync } from 'fs'
|
||||
import { resolve } from 'path'
|
||||
import { defineConfig } from 'tsup'
|
||||
|
||||
const entryPoints = [
|
||||
'src/index.ts',
|
||||
'src/pg_dump.ts',
|
||||
]
|
||||
|
||||
const minify = process.env.DEBUG === 'true' ? false : true
|
||||
|
||||
export default defineConfig([
|
||||
{
|
||||
entry: entryPoints,
|
||||
sourcemap: true,
|
||||
dts: {
|
||||
entry: entryPoints,
|
||||
resolve: true,
|
||||
},
|
||||
clean: true,
|
||||
minify: minify,
|
||||
shims: true,
|
||||
format: ['esm', 'cjs'],
|
||||
onSuccess: async () => {
|
||||
cpSync(resolve('release/pg_dump.wasm'), resolve('dist/pg_dump.wasm'))
|
||||
}
|
||||
},
|
||||
])
|
||||
10
_node_modules/@electric-sql/pglite-tools/vitest.config.ts
generated
Normal file
10
_node_modules/@electric-sql/pglite-tools/vitest.config.ts
generated
Normal file
@@ -0,0 +1,10 @@
|
||||
import { defineConfig } from 'vitest/config'
|
||||
|
||||
export default defineConfig({
|
||||
test: {
|
||||
globals: true,
|
||||
environment: 'node',
|
||||
testTimeout: 30000,
|
||||
setupFiles: ['./tests/setup.ts'],
|
||||
},
|
||||
})
|
||||
176
_node_modules/@electric-sql/pglite/LICENSE
generated
Normal file
176
_node_modules/@electric-sql/pglite/LICENSE
generated
Normal file
@@ -0,0 +1,176 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
189
_node_modules/@electric-sql/pglite/README.md
generated
Normal file
189
_node_modules/@electric-sql/pglite/README.md
generated
Normal file
@@ -0,0 +1,189 @@
|
||||
<p align="center">
|
||||
<a href="https://pglite.dev" target="_blank">
|
||||
<picture>
|
||||
<source media="(prefers-color-scheme: dark)"
|
||||
srcset="https://raw.githubusercontent.com/electric-sql/pglite/main/docs/public/img/brand/logo.svg"
|
||||
/>
|
||||
<source media="(prefers-color-scheme: light)"
|
||||
srcset="https://raw.githubusercontent.com/electric-sql/pglite/main/docs/public/img/brand/logo-light.svg"
|
||||
/>
|
||||
<img alt="ElectricSQL logo"
|
||||
src="https://raw.githubusercontent.com/electric-sql/pglite/main/docs/public/img/brand/logo-light.svg"
|
||||
/>
|
||||
</picture>
|
||||
</a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://pglite.dev">PGlite</a> - the WASM build of Postgres from <a href="https://electric-sql.com" target="_blank">ElectricSQL</a>.<br>
|
||||
Build reactive, realtime, local-first apps directly on Postgres.
|
||||
<p>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://github.com/electric-sql/pglite/stargazers/"><img src="https://img.shields.io/github/stars/electric-sql/pglite?style=social&label=Star" /></a>
|
||||
<!-- <a href="https://github.com/electric-sql/pglite/actions"><img src="https://github.com/electric-sql/pglite/workflows/CI/badge.svg" alt="CI"></a> -->
|
||||
<a href="https://github.com/electric-sql/pglite/blob/main/LICENSE"><img src="https://img.shields.io/badge/license-Apache_2.0-green" alt="License - Apache 2.0"></a>
|
||||
<a href="#roadmap"><img src="https://img.shields.io/badge/status-alpha-orange" alt="Status - Alpha"></a>
|
||||
<a href="https://discord.electric-sql.com"><img src="https://img.shields.io/discord/933657521581858818?color=5969EA&label=discord" alt="Chat - Discord"></a>
|
||||
<a href="https://twitter.com/ElectricSQL" target="_blank"><img src="https://img.shields.io/twitter/follow/nestframework.svg?style=social&label=Follow @ElectricSQL"></a>
|
||||
<a href="https://fosstodon.org/@electric" target="_blank"><img src="https://img.shields.io/mastodon/follow/109599644322136925.svg?domain=https%3A%2F%2Ffosstodon.org"></a>
|
||||
</p>
|
||||
|
||||
# PGlite - Postgres in WASM
|
||||
|
||||

|
||||
|
||||
PGlite is a WASM Postgres build packaged into a TypeScript client library that enables you to run Postgres in the browser, Node.js, Bun and Deno, with no need to install any other dependencies. It is only 3mb gzipped and has support for many Postgres extensions, including [pgvector](https://github.com/pgvector/pgvector).
|
||||
|
||||
```javascript
|
||||
import { PGlite } from '@electric-sql/pglite'
|
||||
|
||||
const db = new PGlite()
|
||||
await db.query("select 'Hello world' as message;")
|
||||
// -> { rows: [ { message: "Hello world" } ] }
|
||||
```
|
||||
|
||||
It can be used as an ephemeral in-memory database, or with persistence either to the file system (Node/Bun/Deno) or indexedDB (Browser).
|
||||
|
||||
Unlike previous "Postgres in the browser" projects, PGlite does not use a Linux virtual machine - it is simply Postgres in WASM.
|
||||
|
||||
For full documentation and user guides see [pglite.dev](https://pglite.dev).
|
||||
|
||||
## Browser
|
||||
|
||||
It can be installed and imported using your usual package manager:
|
||||
|
||||
```js
|
||||
import { PGlite } from '@electric-sql/pglite'
|
||||
```
|
||||
|
||||
or using a CDN such as JSDeliver:
|
||||
|
||||
```js
|
||||
import { PGlite } from 'https://cdn.jsdelivr.net/npm/@electric-sql/pglite/dist/index.js'
|
||||
```
|
||||
|
||||
Then for an in-memory Postgres:
|
||||
|
||||
```js
|
||||
const db = new PGlite()
|
||||
await db.query("select 'Hello world' as message;")
|
||||
// -> { rows: [ { message: "Hello world" } ] }
|
||||
```
|
||||
|
||||
or to persist the database to indexedDB:
|
||||
|
||||
```js
|
||||
const db = new PGlite('idb://my-pgdata')
|
||||
```
|
||||
|
||||
## Node/Bun/Deno
|
||||
|
||||
Install into your project:
|
||||
|
||||
**NodeJS**
|
||||
|
||||
```bash
|
||||
npm install @electric-sql/pglite
|
||||
```
|
||||
|
||||
**Bun**
|
||||
|
||||
```bash
|
||||
bun install @electric-sql/pglite
|
||||
```
|
||||
|
||||
**Deno**
|
||||
|
||||
```bash
|
||||
deno add npm:@electric-sql/pglite
|
||||
```
|
||||
|
||||
To use the in-memory Postgres:
|
||||
|
||||
```javascript
|
||||
import { PGlite } from '@electric-sql/pglite'
|
||||
|
||||
const db = new PGlite()
|
||||
await db.query("select 'Hello world' as message;")
|
||||
// -> { rows: [ { message: "Hello world" } ] }
|
||||
```
|
||||
|
||||
or to persist to the filesystem:
|
||||
|
||||
```javascript
|
||||
const db = new PGlite('./path/to/pgdata')
|
||||
```
|
||||
|
||||
## How it works
|
||||
|
||||
PostgreSQL typically operates using a process forking model; whenever a client initiates a connection, a new process is forked to manage that connection. However, programs compiled with Emscripten - a C to WebAssembly (WASM) compiler - cannot fork new processes, and operates strictly in a single-process mode. As a result, PostgreSQL cannot be directly compiled to WASM for conventional operation.
|
||||
|
||||
Fortunately, PostgreSQL includes a "single user mode" primarily intended for command-line usage during bootstrapping and recovery procedures. Building upon this capability, PGlite introduces a input/output pathway that facilitates interaction with PostgreSQL when it is compiled to WASM within a JavaScript environment.
|
||||
|
||||
## Limitations
|
||||
|
||||
- PGlite is single user/connection.
|
||||
|
||||
## How to build PGlite and contribute
|
||||
|
||||
The build process of PGlite is split into two parts:
|
||||
|
||||
1. Building the Postgres WASM module.
|
||||
2. Building the PGlite client library and other TypeScript packages.
|
||||
|
||||
Docker is required to build the WASM module, along with Node (v20 or above) and [pnpm](https://pnpm.io/) for package management and building the TypeScript packages.
|
||||
|
||||
To start checkout the repository and install dependencies:
|
||||
|
||||
```bash
|
||||
git clone https://github.com/electric-sql/pglite
|
||||
cd pglite
|
||||
pnpm install
|
||||
```
|
||||
|
||||
To build everything, we have the convenient `pnpm build:all` command in the root of the repository. This command will:
|
||||
|
||||
1. Use Docker to build the Postgres WASM module. The artifacts from this are then copied to `/packages/pglite/release`.
|
||||
2. Build the PGlite client library and other TypeScript packages.
|
||||
|
||||
To _only_ build the Postgres WASM module (i.e. point 1 above), run
|
||||
|
||||
```bash
|
||||
pnpm wasm:build
|
||||
```
|
||||
|
||||
If you don't want to build the WASM module and assorted WASM binaries from scratch, you can download them from a comment under the most recently merged PR, labeled as _interim build files_, and place them under `packages/pglite/release`.
|
||||
|
||||
To build all TypeScript packages (i.e. point 2 of the above), run:
|
||||
|
||||
```bash
|
||||
pnpm ts:build
|
||||
```
|
||||
|
||||
This will build all packages in the correct order based on their dependency relationships. You can now develop any individual package using the `build` and `test` scripts, as well as the `stylecheck` and `typecheck` scripts to ensure style and type validity.
|
||||
|
||||
Or alternatively to build a single package, move into the package directory and run:
|
||||
|
||||
```bash
|
||||
cd packages/pglite
|
||||
pnpm build
|
||||
```
|
||||
|
||||
When ready to open a PR, run the following command at the root of the repository:
|
||||
|
||||
```bash
|
||||
pnpm changeset
|
||||
```
|
||||
|
||||
And follow the instructions to create an appropriate changeset. Please ensure any contributions that touch code are accompanied by a changeset.
|
||||
|
||||
## Acknowledgments
|
||||
|
||||
PGlite builds on the work of [Stas Kelvich](https://github.com/kelvich) of [Neon](https://neon.tech) in this [Postgres fork](https://github.com/electric-sql/postgres-wasm).
|
||||
|
||||
## License
|
||||
|
||||
PGlite is dual-licensed under the terms of the [Apache License 2.0](https://github.com/electric-sql/pglite/blob/main/LICENSE) and the [PostgreSQL License](https://github.com/electric-sql/pglite/blob/main/POSTGRES-LICENSE), you can choose which you prefer.
|
||||
|
||||
Changes to the [Postgres source](https://github.com/electric-sql/postgres-wasm) are licensed under the PostgreSQL License.
|
||||
BIN
_node_modules/@electric-sql/pglite/dist/amcheck.tar.gz
generated
vendored
Normal file
BIN
_node_modules/@electric-sql/pglite/dist/amcheck.tar.gz
generated
vendored
Normal file
Binary file not shown.
BIN
_node_modules/@electric-sql/pglite/dist/auth_delay.tar.gz
generated
vendored
Normal file
BIN
_node_modules/@electric-sql/pglite/dist/auth_delay.tar.gz
generated
vendored
Normal file
Binary file not shown.
BIN
_node_modules/@electric-sql/pglite/dist/auto_explain.tar.gz
generated
vendored
Normal file
BIN
_node_modules/@electric-sql/pglite/dist/auto_explain.tar.gz
generated
vendored
Normal file
Binary file not shown.
BIN
_node_modules/@electric-sql/pglite/dist/basebackup_to_shell.tar.gz
generated
vendored
Normal file
BIN
_node_modules/@electric-sql/pglite/dist/basebackup_to_shell.tar.gz
generated
vendored
Normal file
Binary file not shown.
BIN
_node_modules/@electric-sql/pglite/dist/basic_archive.tar.gz
generated
vendored
Normal file
BIN
_node_modules/@electric-sql/pglite/dist/basic_archive.tar.gz
generated
vendored
Normal file
Binary file not shown.
BIN
_node_modules/@electric-sql/pglite/dist/bloom.tar.gz
generated
vendored
Normal file
BIN
_node_modules/@electric-sql/pglite/dist/bloom.tar.gz
generated
vendored
Normal file
Binary file not shown.
BIN
_node_modules/@electric-sql/pglite/dist/btree_gin.tar.gz
generated
vendored
Normal file
BIN
_node_modules/@electric-sql/pglite/dist/btree_gin.tar.gz
generated
vendored
Normal file
Binary file not shown.
BIN
_node_modules/@electric-sql/pglite/dist/btree_gist.tar.gz
generated
vendored
Normal file
BIN
_node_modules/@electric-sql/pglite/dist/btree_gist.tar.gz
generated
vendored
Normal file
Binary file not shown.
2
_node_modules/@electric-sql/pglite/dist/chunk-3WWIVTCY.js
generated
vendored
Normal file
2
_node_modules/@electric-sql/pglite/dist/chunk-3WWIVTCY.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
_node_modules/@electric-sql/pglite/dist/chunk-3WWIVTCY.js.map
generated
vendored
Normal file
1
_node_modules/@electric-sql/pglite/dist/chunk-3WWIVTCY.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
9
_node_modules/@electric-sql/pglite/dist/chunk-F2DQ4FIK.js
generated
vendored
Normal file
9
_node_modules/@electric-sql/pglite/dist/chunk-F2DQ4FIK.js
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
import{a as v,b as B,c as A,d as q,f as _,g as z,i as l,j as g}from"./chunk-3WWIVTCY.js";import{d as R}from"./chunk-F4GETNPB.js";import{e as P,f as w,g as d,h as a,j as f}from"./chunk-QY3QWFKW.js";f();f();function E(m){let s=m.e;return s.query=m.query,s.params=m.params,s.queryOptions=m.options,s}var T,p,t,y,x,h,O,k=class{constructor(){w(this,t);this.serializers={...B};this.parsers={...v};w(this,T,!1);w(this,p,!1)}async _initArrayTypes({force:s=!1}={}){if(P(this,T)&&!s)return;d(this,T,!0);let e=await this.query(`
|
||||
SELECT b.oid, b.typarray
|
||||
FROM pg_catalog.pg_type a
|
||||
LEFT JOIN pg_catalog.pg_type b ON b.oid = a.typelem
|
||||
WHERE a.typcategory = 'A'
|
||||
GROUP BY b.oid, b.typarray
|
||||
ORDER BY b.oid
|
||||
`);for(let r of e.rows)this.serializers[r.typarray]=o=>A(o,this.serializers[r.oid],r.typarray),this.parsers[r.typarray]=o=>q(o,this.parsers[r.oid],r.typarray)}async refreshArrayTypes(){await this._initArrayTypes({force:!0})}async query(s,e,r){return await this._checkReady(),await this._runExclusiveTransaction(async()=>await a(this,t,x).call(this,s,e,r))}async sql(s,...e){let{query:r,params:o}=R(s,...e);return await this.query(r,o)}async exec(s,e){return await this._checkReady(),await this._runExclusiveTransaction(async()=>await a(this,t,h).call(this,s,e))}async describeQuery(s,e){let r=[];try{await a(this,t,y).call(this,l.parse({text:s,types:e?.paramTypes}),e),r=await a(this,t,y).call(this,l.describe({type:"S"}),e)}catch(n){throw n instanceof g?E({e:n,options:e,params:void 0,query:s}):n}finally{r.push(...await a(this,t,y).call(this,l.sync(),e))}let o=r.find(n=>n.name==="parameterDescription"),i=r.find(n=>n.name==="rowDescription"),c=o?.dataTypeIDs.map(n=>({dataTypeID:n,serializer:this.serializers[n]}))??[],u=i?.fields.map(n=>({name:n.name,dataTypeID:n.dataTypeID,parser:this.parsers[n.dataTypeID]}))??[];return{queryParams:c,resultFields:u}}async transaction(s){return await this._checkReady(),await this._runExclusiveTransaction(async()=>{await a(this,t,h).call(this,"BEGIN"),d(this,p,!0);let e=!1,r=()=>{if(e)throw new Error("Transaction is closed")},o={query:async(i,c,u)=>(r(),await a(this,t,x).call(this,i,c,u)),sql:async(i,...c)=>{let{query:u,params:n}=R(i,...c);return await a(this,t,x).call(this,u,n)},exec:async(i,c)=>(r(),await a(this,t,h).call(this,i,c)),rollback:async()=>{r(),await a(this,t,h).call(this,"ROLLBACK"),e=!0},listen:async(i,c)=>(r(),await this.listen(i,c,o)),get closed(){return e}};try{let i=await s(o);return e||(e=!0,await a(this,t,h).call(this,"COMMIT")),d(this,p,!1),i}catch(i){throw e||await a(this,t,h).call(this,"ROLLBACK"),d(this,p,!1),i}})}async runExclusive(s){return await this._runExclusiveQuery(s)}};T=new WeakMap,p=new WeakMap,t=new 
WeakSet,y=async function(s,e={}){return await this.execProtocolStream(s,{...e,syncToFs:!1})},x=async function(s,e=[],r){return await this._runExclusiveQuery(async()=>{a(this,t,O).call(this,"runQuery",s,e,r),await this._handleBlob(r?.blob);let o=[];try{let c=await a(this,t,y).call(this,l.parse({text:s,types:r?.paramTypes}),r),u=z(await a(this,t,y).call(this,l.describe({type:"S"}),r)),n=e.map((b,S)=>{let D=u[S];if(b==null)return null;let Q=r?.serializers?.[D]??this.serializers[D];return Q?Q(b):b.toString()});o=[...c,...await a(this,t,y).call(this,l.bind({values:n}),r),...await a(this,t,y).call(this,l.describe({type:"P"}),r),...await a(this,t,y).call(this,l.execute({}),r)]}catch(c){throw c instanceof g?E({e:c,options:r,params:e,query:s}):c}finally{o.push(...await a(this,t,y).call(this,l.sync(),r))}await this._cleanupBlob(),P(this,p)||await this.syncToFs();let i=await this._getWrittenBlob();return _(o,this.parsers,r,i)[0]})},h=async function(s,e){return await this._runExclusiveQuery(async()=>{a(this,t,O).call(this,"runExec",s,e),await this._handleBlob(e?.blob);let r=[];try{r=await a(this,t,y).call(this,l.query(s),e)}catch(i){throw i instanceof g?E({e:i,options:e,params:void 0,query:s}):i}finally{r.push(...await a(this,t,y).call(this,l.sync(),e))}this._cleanupBlob(),P(this,p)||await this.syncToFs();let o=await this._getWrittenBlob();return _(r,this.parsers,e,o)})},O=function(...s){this.debug>0&&console.log(...s)};export{k as a};
|
||||
//# sourceMappingURL=chunk-F2DQ4FIK.js.map
|
||||
1
_node_modules/@electric-sql/pglite/dist/chunk-F2DQ4FIK.js.map
generated
vendored
Normal file
1
_node_modules/@electric-sql/pglite/dist/chunk-F2DQ4FIK.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
2
_node_modules/@electric-sql/pglite/dist/chunk-F4GETNPB.js
generated
vendored
Normal file
2
_node_modules/@electric-sql/pglite/dist/chunk-F4GETNPB.js
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
import{j as l}from"./chunk-QY3QWFKW.js";l();var o={part:"part",container:"container"};function s(t,r,...e){let a=t.length-1,p=e.length-1;if(p!==-1){if(p===0){t[a]=t[a]+e[0]+r;return}t[a]=t[a]+e[0],t.push(...e.slice(1,p)),t.push(e[p]+r)}}function y(t,...r){let e=[t[0]];e.raw=[t.raw[0]];let a=[];for(let p=0;p<r.length;p++){let n=r[p],i=p+1;if(n?._templateType===o.part){s(e,t[i],n.str),s(e.raw,t.raw[i],n.str);continue}if(n?._templateType===o.container){s(e,t[i],...n.strings),s(e.raw,t.raw[i],...n.strings.raw),a.push(...n.values);continue}e.push(t[i]),e.raw.push(t.raw[i]),a.push(n)}return{_templateType:"container",strings:e,values:a}}function c(t,...r){return{_templateType:"part",str:`"${String.raw(t,...r)}"`}}function m(t,...r){return{_templateType:"part",str:String.raw(t,...r)}}function g(t,...r){let{strings:e,values:a}=y(t,...r);return{query:[e[0],...a.flatMap((p,n)=>[`$${n+1}`,e[n+1]])].join(""),params:a}}export{y as a,c as b,m as c,g as d};
|
||||
//# sourceMappingURL=chunk-F4GETNPB.js.map
|
||||
1
_node_modules/@electric-sql/pglite/dist/chunk-F4GETNPB.js.map
generated
vendored
Normal file
1
_node_modules/@electric-sql/pglite/dist/chunk-F4GETNPB.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
2
_node_modules/@electric-sql/pglite/dist/chunk-QY3QWFKW.js
generated
vendored
Normal file
2
_node_modules/@electric-sql/pglite/dist/chunk-QY3QWFKW.js
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
var p=Object.create;var i=Object.defineProperty;var c=Object.getOwnPropertyDescriptor;var f=Object.getOwnPropertyNames;var l=Object.getPrototypeOf,s=Object.prototype.hasOwnProperty;var a=t=>{throw TypeError(t)};var _=(t,e,o)=>e in t?i(t,e,{enumerable:!0,configurable:!0,writable:!0,value:o}):t[e]=o;var d=(t,e)=>()=>(t&&(e=t(t=0)),e);var D=(t,e)=>()=>(e||t((e={exports:{}}).exports,e),e.exports),F=(t,e)=>{for(var o in e)i(t,o,{get:e[o],enumerable:!0})},g=(t,e,o,m)=>{if(e&&typeof e=="object"||typeof e=="function")for(let r of f(e))!s.call(t,r)&&r!==o&&i(t,r,{get:()=>e[r],enumerable:!(m=c(e,r))||m.enumerable});return t};var L=(t,e,o)=>(o=t!=null?p(l(t)):{},g(e||!t||!t.__esModule?i(o,"default",{value:t,enumerable:!0}):o,t));var P=(t,e,o)=>_(t,typeof e!="symbol"?e+"":e,o),n=(t,e,o)=>e.has(t)||a("Cannot "+o);var h=(t,e,o)=>(n(t,e,"read from private field"),o?o.call(t):e.get(t)),R=(t,e,o)=>e.has(t)?a("Cannot add the same private member more than once"):e instanceof WeakSet?e.add(t):e.set(t,o),x=(t,e,o,m)=>(n(t,e,"write to private field"),m?m.call(t,o):e.set(t,o),o),T=(t,e,o)=>(n(t,e,"access private method"),o);var U=(t,e,o,m)=>({set _(r){x(t,e,r,o)},get _(){return h(t,e,m)}});var u=d(()=>{"use strict"});export{D as a,F as b,L as c,P as d,h as e,R as f,x as g,T as h,U as i,u as j};
|
||||
//# sourceMappingURL=chunk-QY3QWFKW.js.map
|
||||
1
_node_modules/@electric-sql/pglite/dist/chunk-QY3QWFKW.js.map
generated
vendored
Normal file
1
_node_modules/@electric-sql/pglite/dist/chunk-QY3QWFKW.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../node_modules/.pnpm/tsup@8.3.0_@microsoft+api-extractor@7.47.7_@types+node@20.16.11__postcss@8.4.47_tsx@4.19.2_typescript@5.6.3/node_modules/tsup/assets/esm_shims.js"],"sourcesContent":["// Shim globals in esm bundle\nimport { fileURLToPath } from 'url'\nimport path from 'path'\n\nconst getFilename = () => fileURLToPath(import.meta.url)\nconst getDirname = () => path.dirname(getFilename())\n\nexport const __dirname = /* @__PURE__ */ getDirname()\nexport const __filename = /* @__PURE__ */ getFilename()\n"],"mappings":"kqCAAA,IAAAA,EAAAC,EAAA","names":["init_esm_shims","__esmMin"]}
|
||||
2
_node_modules/@electric-sql/pglite/dist/chunk-VBDAOXYI.js
generated
vendored
Normal file
2
_node_modules/@electric-sql/pglite/dist/chunk-VBDAOXYI.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
_node_modules/@electric-sql/pglite/dist/chunk-VBDAOXYI.js.map
generated
vendored
Normal file
1
_node_modules/@electric-sql/pglite/dist/chunk-VBDAOXYI.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
BIN
_node_modules/@electric-sql/pglite/dist/citext.tar.gz
generated
vendored
Normal file
BIN
_node_modules/@electric-sql/pglite/dist/citext.tar.gz
generated
vendored
Normal file
Binary file not shown.
2
_node_modules/@electric-sql/pglite/dist/contrib/amcheck.cjs
generated
vendored
Normal file
2
_node_modules/@electric-sql/pglite/dist/contrib/amcheck.cjs
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
"use strict";var c=Object.defineProperty;var a=Object.getOwnPropertyDescriptor;var o=Object.getOwnPropertyNames;var m=Object.prototype.hasOwnProperty;var p=(t,e)=>{for(var r in e)c(t,r,{get:e[r],enumerable:!0})},u=(t,e,r,s)=>{if(e&&typeof e=="object"||typeof e=="function")for(let n of o(e))!m.call(t,n)&&n!==r&&c(t,n,{get:()=>e[n],enumerable:!(s=a(e,n))||s.enumerable});return t};var f=t=>u(c({},"__esModule",{value:!0}),t);var x={};p(x,{amcheck:()=>h});module.exports=f(x);var l=()=>typeof document>"u"?new URL(`file:${__filename}`).href:document.currentScript&&document.currentScript.src||new URL("main.js",document.baseURI).href,i=l();var d=async(t,e)=>({bundlePath:new URL("../amcheck.tar.gz",i)}),h={name:"amcheck",setup:d};0&&(module.exports={amcheck});
|
||||
//# sourceMappingURL=amcheck.cjs.map
|
||||
1
_node_modules/@electric-sql/pglite/dist/contrib/amcheck.cjs.map
generated
vendored
Normal file
1
_node_modules/@electric-sql/pglite/dist/contrib/amcheck.cjs.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../src/contrib/amcheck.ts","../../../../node_modules/.pnpm/tsup@8.3.0_@microsoft+api-extractor@7.47.7_@types+node@20.16.11__postcss@8.4.47_tsx@4.19.2_typescript@5.6.3/node_modules/tsup/assets/cjs_shims.js"],"sourcesContent":["import type {\n Extension,\n ExtensionSetupResult,\n PGliteInterface,\n} from '../interface'\n\nconst setup = async (_pg: PGliteInterface, _emscriptenOpts: any) => {\n return {\n bundlePath: new URL('../../release/amcheck.tar.gz', import.meta.url),\n } satisfies ExtensionSetupResult\n}\n\nexport const amcheck = {\n name: 'amcheck',\n setup,\n} satisfies Extension\n","// Shim globals in cjs bundle\n// There's a weird bug that esbuild will always inject importMetaUrl\n// if we export it as `const importMetaUrl = ... __filename ...`\n// But using a function will not cause this issue\n\nconst getImportMetaUrl = () =>\n typeof document === 'undefined'\n ? new URL(`file:${__filename}`).href\n : (document.currentScript && document.currentScript.src) ||\n new URL('main.js', document.baseURI).href\n\nexport const importMetaUrl = /* @__PURE__ */ getImportMetaUrl()\n"],"mappings":"yaAAA,IAAAA,EAAA,GAAAC,EAAAD,EAAA,aAAAE,IAAA,eAAAC,EAAAH,GCKA,IAAMI,EAAmB,IACvB,OAAO,SAAa,IAChB,IAAI,IAAI,QAAQ,UAAU,EAAE,EAAE,KAC7B,SAAS,eAAiB,SAAS,cAAc,KAClD,IAAI,IAAI,UAAW,SAAS,OAAO,EAAE,KAE9BC,EAAgCD,EAAiB,EDL9D,IAAME,EAAQ,MAAOC,EAAsBC,KAClC,CACL,WAAY,IAAI,IAAI,+BAAgCC,CAAe,CACrE,GAGWC,EAAU,CACrB,KAAM,UACN,MAAAJ,CACF","names":["amcheck_exports","__export","amcheck","__toCommonJS","getImportMetaUrl","importMetaUrl","setup","_pg","_emscriptenOpts","importMetaUrl","amcheck"]}
|
||||
10
_node_modules/@electric-sql/pglite/dist/contrib/amcheck.d.cts
generated
vendored
Normal file
10
_node_modules/@electric-sql/pglite/dist/contrib/amcheck.d.cts
generated
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
import { d as PGliteInterface } from '../pglite-CntadC_p.cjs';
|
||||
|
||||
declare const amcheck: {
|
||||
name: string;
|
||||
setup: (_pg: PGliteInterface, _emscriptenOpts: any) => Promise<{
|
||||
bundlePath: URL;
|
||||
}>;
|
||||
};
|
||||
|
||||
export { amcheck };
|
||||
10
_node_modules/@electric-sql/pglite/dist/contrib/amcheck.d.ts
generated
vendored
Normal file
10
_node_modules/@electric-sql/pglite/dist/contrib/amcheck.d.ts
generated
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
import { d as PGliteInterface } from '../pglite-CntadC_p.js';
|
||||
|
||||
declare const amcheck: {
|
||||
name: string;
|
||||
setup: (_pg: PGliteInterface, _emscriptenOpts: any) => Promise<{
|
||||
bundlePath: URL;
|
||||
}>;
|
||||
};
|
||||
|
||||
export { amcheck };
|
||||
2
_node_modules/@electric-sql/pglite/dist/contrib/amcheck.js
generated
vendored
Normal file
2
_node_modules/@electric-sql/pglite/dist/contrib/amcheck.js
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
import{j as e}from"../chunk-QY3QWFKW.js";e();var t=async(n,s)=>({bundlePath:new URL("../amcheck.tar.gz",import.meta.url)}),c={name:"amcheck",setup:t};export{c as amcheck};
|
||||
//# sourceMappingURL=amcheck.js.map
|
||||
1
_node_modules/@electric-sql/pglite/dist/contrib/amcheck.js.map
generated
vendored
Normal file
1
_node_modules/@electric-sql/pglite/dist/contrib/amcheck.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../src/contrib/amcheck.ts"],"sourcesContent":["import type {\n Extension,\n ExtensionSetupResult,\n PGliteInterface,\n} from '../interface'\n\nconst setup = async (_pg: PGliteInterface, _emscriptenOpts: any) => {\n return {\n bundlePath: new URL('../../release/amcheck.tar.gz', import.meta.url),\n } satisfies ExtensionSetupResult\n}\n\nexport const amcheck = {\n name: 'amcheck',\n setup,\n} satisfies Extension\n"],"mappings":"yCAAAA,IAMA,IAAMC,EAAQ,MAAOC,EAAsBC,KAClC,CACL,WAAY,IAAI,IAAI,+BAAgC,YAAY,GAAG,CACrE,GAGWC,EAAU,CACrB,KAAM,UACN,MAAAH,CACF","names":["init_esm_shims","setup","_pg","_emscriptenOpts","amcheck"]}
|
||||
2
_node_modules/@electric-sql/pglite/dist/contrib/auto_explain.cjs
generated
vendored
Normal file
2
_node_modules/@electric-sql/pglite/dist/contrib/auto_explain.cjs
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
"use strict";var s=Object.defineProperty;var o=Object.getOwnPropertyDescriptor;var c=Object.getOwnPropertyNames;var p=Object.prototype.hasOwnProperty;var u=(t,e)=>{for(var r in e)s(t,r,{get:e[r],enumerable:!0})},l=(t,e,r,i)=>{if(e&&typeof e=="object"||typeof e=="function")for(let n of c(e))!p.call(t,n)&&n!==r&&s(t,n,{get:()=>e[n],enumerable:!(i=o(e,n))||i.enumerable});return t};var m=t=>l(s({},"__esModule",{value:!0}),t);var _={};u(_,{auto_explain:()=>d});module.exports=m(_);var f=()=>typeof document>"u"?new URL(`file:${__filename}`).href:document.currentScript&&document.currentScript.src||new URL("main.js",document.baseURI).href,a=f();var x=async(t,e)=>({bundlePath:new URL("../auto_explain.tar.gz",a)}),d={name:"auto_explain",setup:x};0&&(module.exports={auto_explain});
|
||||
//# sourceMappingURL=auto_explain.cjs.map
|
||||
1
_node_modules/@electric-sql/pglite/dist/contrib/auto_explain.cjs.map
generated
vendored
Normal file
1
_node_modules/@electric-sql/pglite/dist/contrib/auto_explain.cjs.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../src/contrib/auto_explain.ts","../../../../node_modules/.pnpm/tsup@8.3.0_@microsoft+api-extractor@7.47.7_@types+node@20.16.11__postcss@8.4.47_tsx@4.19.2_typescript@5.6.3/node_modules/tsup/assets/cjs_shims.js"],"sourcesContent":["import type {\n Extension,\n ExtensionSetupResult,\n PGliteInterface,\n} from '../interface'\n\nconst setup = async (_pg: PGliteInterface, _emscriptenOpts: any) => {\n return {\n bundlePath: new URL('../../release/auto_explain.tar.gz', import.meta.url),\n } satisfies ExtensionSetupResult\n}\n\nexport const auto_explain = {\n name: 'auto_explain',\n setup,\n} satisfies Extension\n","// Shim globals in cjs bundle\n// There's a weird bug that esbuild will always inject importMetaUrl\n// if we export it as `const importMetaUrl = ... __filename ...`\n// But using a function will not cause this issue\n\nconst getImportMetaUrl = () =>\n typeof document === 'undefined'\n ? new URL(`file:${__filename}`).href\n : (document.currentScript && document.currentScript.src) ||\n new URL('main.js', document.baseURI).href\n\nexport const importMetaUrl = /* @__PURE__ */ getImportMetaUrl()\n"],"mappings":"yaAAA,IAAAA,EAAA,GAAAC,EAAAD,EAAA,kBAAAE,IAAA,eAAAC,EAAAH,GCKA,IAAMI,EAAmB,IACvB,OAAO,SAAa,IAChB,IAAI,IAAI,QAAQ,UAAU,EAAE,EAAE,KAC7B,SAAS,eAAiB,SAAS,cAAc,KAClD,IAAI,IAAI,UAAW,SAAS,OAAO,EAAE,KAE9BC,EAAgCD,EAAiB,EDL9D,IAAME,EAAQ,MAAOC,EAAsBC,KAClC,CACL,WAAY,IAAI,IAAI,oCAAqCC,CAAe,CAC1E,GAGWC,EAAe,CAC1B,KAAM,eACN,MAAAJ,CACF","names":["auto_explain_exports","__export","auto_explain","__toCommonJS","getImportMetaUrl","importMetaUrl","setup","_pg","_emscriptenOpts","importMetaUrl","auto_explain"]}
|
||||
10
_node_modules/@electric-sql/pglite/dist/contrib/auto_explain.d.cts
generated
vendored
Normal file
10
_node_modules/@electric-sql/pglite/dist/contrib/auto_explain.d.cts
generated
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
import { d as PGliteInterface } from '../pglite-CntadC_p.cjs';
|
||||
|
||||
declare const auto_explain: {
|
||||
name: string;
|
||||
setup: (_pg: PGliteInterface, _emscriptenOpts: any) => Promise<{
|
||||
bundlePath: URL;
|
||||
}>;
|
||||
};
|
||||
|
||||
export { auto_explain };
|
||||
10
_node_modules/@electric-sql/pglite/dist/contrib/auto_explain.d.ts
generated
vendored
Normal file
10
_node_modules/@electric-sql/pglite/dist/contrib/auto_explain.d.ts
generated
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
import { d as PGliteInterface } from '../pglite-CntadC_p.js';
|
||||
|
||||
declare const auto_explain: {
|
||||
name: string;
|
||||
setup: (_pg: PGliteInterface, _emscriptenOpts: any) => Promise<{
|
||||
bundlePath: URL;
|
||||
}>;
|
||||
};
|
||||
|
||||
export { auto_explain };
|
||||
2
_node_modules/@electric-sql/pglite/dist/contrib/auto_explain.js
generated
vendored
Normal file
2
_node_modules/@electric-sql/pglite/dist/contrib/auto_explain.js
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
import{j as e}from"../chunk-QY3QWFKW.js";e();var t=async(n,s)=>({bundlePath:new URL("../auto_explain.tar.gz",import.meta.url)}),o={name:"auto_explain",setup:t};export{o as auto_explain};
|
||||
//# sourceMappingURL=auto_explain.js.map
|
||||
1
_node_modules/@electric-sql/pglite/dist/contrib/auto_explain.js.map
generated
vendored
Normal file
1
_node_modules/@electric-sql/pglite/dist/contrib/auto_explain.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../src/contrib/auto_explain.ts"],"sourcesContent":["import type {\n Extension,\n ExtensionSetupResult,\n PGliteInterface,\n} from '../interface'\n\nconst setup = async (_pg: PGliteInterface, _emscriptenOpts: any) => {\n return {\n bundlePath: new URL('../../release/auto_explain.tar.gz', import.meta.url),\n } satisfies ExtensionSetupResult\n}\n\nexport const auto_explain = {\n name: 'auto_explain',\n setup,\n} satisfies Extension\n"],"mappings":"yCAAAA,IAMA,IAAMC,EAAQ,MAAOC,EAAsBC,KAClC,CACL,WAAY,IAAI,IAAI,oCAAqC,YAAY,GAAG,CAC1E,GAGWC,EAAe,CAC1B,KAAM,eACN,MAAAH,CACF","names":["init_esm_shims","setup","_pg","_emscriptenOpts","auto_explain"]}
|
||||
2
_node_modules/@electric-sql/pglite/dist/contrib/bloom.cjs
generated
vendored
Normal file
2
_node_modules/@electric-sql/pglite/dist/contrib/bloom.cjs
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
"use strict";var r=Object.defineProperty;var c=Object.getOwnPropertyDescriptor;var a=Object.getOwnPropertyNames;var m=Object.prototype.hasOwnProperty;var p=(t,e)=>{for(var o in e)r(t,o,{get:e[o],enumerable:!0})},u=(t,e,o,s)=>{if(e&&typeof e=="object"||typeof e=="function")for(let n of a(e))!m.call(t,n)&&n!==o&&r(t,n,{get:()=>e[n],enumerable:!(s=c(e,n))||s.enumerable});return t};var l=t=>u(r({},"__esModule",{value:!0}),t);var R={};p(R,{bloom:()=>x});module.exports=l(R);var f=()=>typeof document>"u"?new URL(`file:${__filename}`).href:document.currentScript&&document.currentScript.src||new URL("main.js",document.baseURI).href,i=f();var d=async(t,e)=>({bundlePath:new URL("../bloom.tar.gz",i)}),x={name:"bloom",setup:d};0&&(module.exports={bloom});
|
||||
//# sourceMappingURL=bloom.cjs.map
|
||||
1
_node_modules/@electric-sql/pglite/dist/contrib/bloom.cjs.map
generated
vendored
Normal file
1
_node_modules/@electric-sql/pglite/dist/contrib/bloom.cjs.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../src/contrib/bloom.ts","../../../../node_modules/.pnpm/tsup@8.3.0_@microsoft+api-extractor@7.47.7_@types+node@20.16.11__postcss@8.4.47_tsx@4.19.2_typescript@5.6.3/node_modules/tsup/assets/cjs_shims.js"],"sourcesContent":["import type {\n Extension,\n ExtensionSetupResult,\n PGliteInterface,\n} from '../interface'\n\nconst setup = async (_pg: PGliteInterface, _emscriptenOpts: any) => {\n return {\n bundlePath: new URL('../../release/bloom.tar.gz', import.meta.url),\n } satisfies ExtensionSetupResult\n}\n\nexport const bloom = {\n name: 'bloom',\n setup,\n} satisfies Extension\n","// Shim globals in cjs bundle\n// There's a weird bug that esbuild will always inject importMetaUrl\n// if we export it as `const importMetaUrl = ... __filename ...`\n// But using a function will not cause this issue\n\nconst getImportMetaUrl = () =>\n typeof document === 'undefined'\n ? new URL(`file:${__filename}`).href\n : (document.currentScript && document.currentScript.src) ||\n new URL('main.js', document.baseURI).href\n\nexport const importMetaUrl = /* @__PURE__ */ getImportMetaUrl()\n"],"mappings":"yaAAA,IAAAA,EAAA,GAAAC,EAAAD,EAAA,WAAAE,IAAA,eAAAC,EAAAH,GCKA,IAAMI,EAAmB,IACvB,OAAO,SAAa,IAChB,IAAI,IAAI,QAAQ,UAAU,EAAE,EAAE,KAC7B,SAAS,eAAiB,SAAS,cAAc,KAClD,IAAI,IAAI,UAAW,SAAS,OAAO,EAAE,KAE9BC,EAAgCD,EAAiB,EDL9D,IAAME,EAAQ,MAAOC,EAAsBC,KAClC,CACL,WAAY,IAAI,IAAI,6BAA8BC,CAAe,CACnE,GAGWC,EAAQ,CACnB,KAAM,QACN,MAAAJ,CACF","names":["bloom_exports","__export","bloom","__toCommonJS","getImportMetaUrl","importMetaUrl","setup","_pg","_emscriptenOpts","importMetaUrl","bloom"]}
|
||||
10
_node_modules/@electric-sql/pglite/dist/contrib/bloom.d.cts
generated
vendored
Normal file
10
_node_modules/@electric-sql/pglite/dist/contrib/bloom.d.cts
generated
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
import { d as PGliteInterface } from '../pglite-CntadC_p.cjs';
|
||||
|
||||
declare const bloom: {
|
||||
name: string;
|
||||
setup: (_pg: PGliteInterface, _emscriptenOpts: any) => Promise<{
|
||||
bundlePath: URL;
|
||||
}>;
|
||||
};
|
||||
|
||||
export { bloom };
|
||||
10
_node_modules/@electric-sql/pglite/dist/contrib/bloom.d.ts
generated
vendored
Normal file
10
_node_modules/@electric-sql/pglite/dist/contrib/bloom.d.ts
generated
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
import { d as PGliteInterface } from '../pglite-CntadC_p.js';
|
||||
|
||||
declare const bloom: {
|
||||
name: string;
|
||||
setup: (_pg: PGliteInterface, _emscriptenOpts: any) => Promise<{
|
||||
bundlePath: URL;
|
||||
}>;
|
||||
};
|
||||
|
||||
export { bloom };
|
||||
2
_node_modules/@electric-sql/pglite/dist/contrib/bloom.js
generated
vendored
Normal file
2
_node_modules/@electric-sql/pglite/dist/contrib/bloom.js
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
import{j as e}from"../chunk-QY3QWFKW.js";e();var t=async(n,s)=>({bundlePath:new URL("../bloom.tar.gz",import.meta.url)}),r={name:"bloom",setup:t};export{r as bloom};
|
||||
//# sourceMappingURL=bloom.js.map
|
||||
1
_node_modules/@electric-sql/pglite/dist/contrib/bloom.js.map
generated
vendored
Normal file
1
_node_modules/@electric-sql/pglite/dist/contrib/bloom.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../src/contrib/bloom.ts"],"sourcesContent":["import type {\n Extension,\n ExtensionSetupResult,\n PGliteInterface,\n} from '../interface'\n\nconst setup = async (_pg: PGliteInterface, _emscriptenOpts: any) => {\n return {\n bundlePath: new URL('../../release/bloom.tar.gz', import.meta.url),\n } satisfies ExtensionSetupResult\n}\n\nexport const bloom = {\n name: 'bloom',\n setup,\n} satisfies Extension\n"],"mappings":"yCAAAA,IAMA,IAAMC,EAAQ,MAAOC,EAAsBC,KAClC,CACL,WAAY,IAAI,IAAI,6BAA8B,YAAY,GAAG,CACnE,GAGWC,EAAQ,CACnB,KAAM,QACN,MAAAH,CACF","names":["init_esm_shims","setup","_pg","_emscriptenOpts","bloom"]}
|
||||
2
_node_modules/@electric-sql/pglite/dist/contrib/btree_gin.cjs
generated
vendored
Normal file
2
_node_modules/@electric-sql/pglite/dist/contrib/btree_gin.cjs
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
"use strict";var s=Object.defineProperty;var c=Object.getOwnPropertyDescriptor;var a=Object.getOwnPropertyNames;var p=Object.prototype.hasOwnProperty;var u=(t,e)=>{for(var r in e)s(t,r,{get:e[r],enumerable:!0})},m=(t,e,r,i)=>{if(e&&typeof e=="object"||typeof e=="function")for(let n of a(e))!p.call(t,n)&&n!==r&&s(t,n,{get:()=>e[n],enumerable:!(i=c(e,n))||i.enumerable});return t};var f=t=>m(s({},"__esModule",{value:!0}),t);var g={};u(g,{btree_gin:()=>_});module.exports=f(g);var l=()=>typeof document>"u"?new URL(`file:${__filename}`).href:document.currentScript&&document.currentScript.src||new URL("main.js",document.baseURI).href,o=l();var d=async(t,e)=>({bundlePath:new URL("../btree_gin.tar.gz",o)}),_={name:"btree_gin",setup:d};0&&(module.exports={btree_gin});
|
||||
//# sourceMappingURL=btree_gin.cjs.map
|
||||
1
_node_modules/@electric-sql/pglite/dist/contrib/btree_gin.cjs.map
generated
vendored
Normal file
1
_node_modules/@electric-sql/pglite/dist/contrib/btree_gin.cjs.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../src/contrib/btree_gin.ts","../../../../node_modules/.pnpm/tsup@8.3.0_@microsoft+api-extractor@7.47.7_@types+node@20.16.11__postcss@8.4.47_tsx@4.19.2_typescript@5.6.3/node_modules/tsup/assets/cjs_shims.js"],"sourcesContent":["import type {\n Extension,\n ExtensionSetupResult,\n PGliteInterface,\n} from '../interface'\n\nconst setup = async (_pg: PGliteInterface, _emscriptenOpts: any) => {\n return {\n bundlePath: new URL('../../release/btree_gin.tar.gz', import.meta.url),\n } satisfies ExtensionSetupResult\n}\n\nexport const btree_gin = {\n name: 'btree_gin',\n setup,\n} satisfies Extension\n","// Shim globals in cjs bundle\n// There's a weird bug that esbuild will always inject importMetaUrl\n// if we export it as `const importMetaUrl = ... __filename ...`\n// But using a function will not cause this issue\n\nconst getImportMetaUrl = () =>\n typeof document === 'undefined'\n ? new URL(`file:${__filename}`).href\n : (document.currentScript && document.currentScript.src) ||\n new URL('main.js', document.baseURI).href\n\nexport const importMetaUrl = /* @__PURE__ */ getImportMetaUrl()\n"],"mappings":"yaAAA,IAAAA,EAAA,GAAAC,EAAAD,EAAA,eAAAE,IAAA,eAAAC,EAAAH,GCKA,IAAMI,EAAmB,IACvB,OAAO,SAAa,IAChB,IAAI,IAAI,QAAQ,UAAU,EAAE,EAAE,KAC7B,SAAS,eAAiB,SAAS,cAAc,KAClD,IAAI,IAAI,UAAW,SAAS,OAAO,EAAE,KAE9BC,EAAgCD,EAAiB,EDL9D,IAAME,EAAQ,MAAOC,EAAsBC,KAClC,CACL,WAAY,IAAI,IAAI,iCAAkCC,CAAe,CACvE,GAGWC,EAAY,CACvB,KAAM,YACN,MAAAJ,CACF","names":["btree_gin_exports","__export","btree_gin","__toCommonJS","getImportMetaUrl","importMetaUrl","setup","_pg","_emscriptenOpts","importMetaUrl","btree_gin"]}
|
||||
10
_node_modules/@electric-sql/pglite/dist/contrib/btree_gin.d.cts
generated
vendored
Normal file
10
_node_modules/@electric-sql/pglite/dist/contrib/btree_gin.d.cts
generated
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
import { d as PGliteInterface } from '../pglite-CntadC_p.cjs';
|
||||
|
||||
declare const btree_gin: {
|
||||
name: string;
|
||||
setup: (_pg: PGliteInterface, _emscriptenOpts: any) => Promise<{
|
||||
bundlePath: URL;
|
||||
}>;
|
||||
};
|
||||
|
||||
export { btree_gin };
|
||||
10
_node_modules/@electric-sql/pglite/dist/contrib/btree_gin.d.ts
generated
vendored
Normal file
10
_node_modules/@electric-sql/pglite/dist/contrib/btree_gin.d.ts
generated
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
import { d as PGliteInterface } from '../pglite-CntadC_p.js';
|
||||
|
||||
declare const btree_gin: {
|
||||
name: string;
|
||||
setup: (_pg: PGliteInterface, _emscriptenOpts: any) => Promise<{
|
||||
bundlePath: URL;
|
||||
}>;
|
||||
};
|
||||
|
||||
export { btree_gin };
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user