
Commit 3150b0d

update to sdk v4.2.1, fix error handling, add connector definition and github workflow
1 parent: 66d9782

20 files changed: +2581 -1255 lines

.github/workflows/docker.yml (new file, +95)

# Example modified from https://docs.github.com/en/actions/publishing-packages/publishing-docker-images
name: Create and publish a Docker image

# Configures this workflow to run every time a change is pushed to selected tags and branches
on:
  pull_request:
    branches:
      - main
  push:
    branches:
      - main
    tags:
      - v**

# Defines two custom environment variables for the workflow. These are used for the Container registry domain, and a name for the Docker image that this workflow builds.
env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}

# There is a single job in this workflow. It's configured to run on the latest available version of Ubuntu.
jobs:
  build-and-push-image:
    runs-on: ubuntu-latest
    # Sets the permissions granted to the `GITHUB_TOKEN` for the actions in this job.
    permissions:
      contents: read
      packages: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      # Uses the `docker/login-action` action to log in to the Container registry using the account and password that will publish the packages. Once published, the packages are scoped to the account defined here.
      - name: Log in to the Container registry
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.PAT_TOKEN }}
      # This step uses [docker/metadata-action](https://github.com/docker/metadata-action#about) to extract tags and labels that will be applied to the specified image. The `id` "meta" allows the output of this step to be referenced in a subsequent step. The `images` value provides the base name for the tags and labels.
      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
      # This step uses the `docker/build-push-action` action to build the image, based on your repository's `Dockerfile`. If the build succeeds, it pushes the image to GitHub Packages.
      # It uses the `context` parameter to define the build's context as the set of files located in the specified path. For more information, see "[Usage](https://github.com/docker/build-push-action#usage)" in the README of the `docker/build-push-action` repository.
      # It uses the `tags` and `labels` parameters to tag and label the image with the output from the "meta" step.
      - name: Build and push Docker image
        uses: docker/build-push-action@v5
        with:
          context: .
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
      - name: Build connector definition
        run: |
          set -eo pipefail
          export DOCKER_IMAGE=$(echo "$DOCKER_METADATA_OUTPUT_JSON" | jq -r '.tags[0]')
          make build
        working-directory: ./connector-definition
      - uses: actions/upload-artifact@v4
        with:
          name: connector-definition.tgz
          path: ./connector-definition/dist/connector-definition.tgz
          compression-level: 0 # Already compressed

  release-connector:
    name: Release connector
    runs-on: ubuntu-latest
    needs: build-and-push-image
    if: ${{ startsWith(github.ref, 'refs/tags/v') }}
    steps:
      - uses: actions/checkout@v4
      - uses: actions/download-artifact@v4
        with:
          name: connector-definition.tgz
          path: ./connector-definition/dist
      - name: Get version from tag
        id: get-version
        run: |
          echo "tagged_version=${GITHUB_REF#refs/tags/v}" >> $GITHUB_OUTPUT
        shell: bash
      - uses: mindsers/changelog-reader-action@v2
        id: changelog-reader
        with:
          version: ${{ steps.get-version.outputs.tagged_version }}
          path: ./CHANGELOG.md
      - uses: softprops/action-gh-release@v1
        with:
          draft: false
          tag_name: v${{ steps.get-version.outputs.tagged_version }}
          body: ${{ steps.changelog-reader.outputs.changes }}
          files: |
            ./connector-definition/dist/connector-definition.tgz
          fail_on_unmatched_files: true
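
The "Build connector definition" step above relies on DOCKER_METADATA_OUTPUT_JSON, which docker/metadata-action exposes to later steps, and pulls the first generated tag out of it with jq -r '.tags[0]'. As a rough TypeScript illustration of what that extraction does (the sample JSON values here are assumptions based on the image name in the connector definition below, not captured workflow output):

// metadata-sketch.ts: hypothetical sample values; performs the same ".tags[0]" selection as the jq one-liner
const sampleMetadata = `{"tags": ["ghcr.io/hasura/ndc-duckdb:v0.0.9", "ghcr.io/hasura/ndc-duckdb:latest"], "labels": {}}`;
const parsed: { tags: string[] } = JSON.parse(sampleMetadata);
const dockerImage = parsed.tags[0]; // the workflow exports this as DOCKER_IMAGE before running "make build"
console.log(dockerImage);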

CHANGELOG.md (new file, +9)

# DuckDB Connector Changelog
This changelog documents changes between release tags.

## [Unreleased]
Upcoming changes for the next versioned release.

## [0.0.9] - 2024-03-22
* Initial Version Tag

Dockerfile (+22 -9)

@@ -1,15 +1,16 @@
-# Use Node.js 18 as the base image
-FROM node:18 AS build-stage
+# Use Node.js 20 as the base image for both build and production stages
+FROM node:20 AS build-stage
 
-# Set the working directory inside the container
+# Set the working directory inside the container for the build stage
 WORKDIR /usr/src/app
 
-# Copy package.json and package-lock.json (if you have one) to the working directory
+# Copy package.json and package-lock.json (if available) to the working directory
 COPY package*.json ./
 
 # Install both production and development dependencies
 RUN npm install
 
+# Install TypeScript globally
 RUN npm install -g typescript
 
 # Copy the entire project

@@ -19,8 +20,9 @@ COPY . .
 RUN tsc
 
 # Start a new stage for the production environment
-FROM node:18 AS production
+FROM node:20 AS production
 
+# Set working directory for the production stage
 WORKDIR /usr/src/app
 
 # Copy package.json and package-lock.json to the working directory

@@ -29,10 +31,21 @@ COPY package*.json ./
 # Install only production dependencies
 RUN npm ci --only=production
 
-# Copy compiled JavaScript from the previous stage
+# Copy compiled JavaScript from the build stage
 COPY --from=build-stage /usr/src/app/dist ./dist
 
-EXPOSE 8100
+# Define the environment variable for configuration directory with a default value, which can be overridden
+ENV HASURA_CONFIGURATION_DIRECTORY=/etc/connector
 
-# Define the command to run the app using CMD
-CMD ["node", "./dist/src/index.js", "serve", "--configuration=/etc/connector/config.json"]
+# Set the default port environment variable and allow it to be overridden
+ENV HASURA_CONNECTOR_PORT=8080
+
+# Expose the port specified by the HASURA_CONNECTOR_PORT environment variable
+EXPOSE $HASURA_CONNECTOR_PORT
+
+# Copy the entrypoint script into the container and make it executable
+COPY entrypoint.sh /usr/local/bin/entrypoint.sh
+RUN chmod +x /usr/local/bin/entrypoint.sh
+
+# Use the entrypoint script to handle startup and signal trapping
+ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
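
The new Dockerfile replaces the fixed EXPOSE 8100 and hard-coded CMD with overridable HASURA_CONFIGURATION_DIRECTORY and HASURA_CONNECTOR_PORT variables plus an entrypoint script. A minimal sketch of how a Node process could honor those variables at startup; only the variable names and their defaults come from the Dockerfile above, the rest is illustrative:

// startup-sketch.ts: illustrative wiring only, not the connector's actual startup code
import * as path from "path";

const port = parseInt(process.env["HASURA_CONNECTOR_PORT"] ?? "8080", 10);
const configDir = process.env["HASURA_CONFIGURATION_DIRECTORY"] ?? "/etc/connector";
const configFile = path.join(configDir, "config.json");

console.log(`serve would listen on port ${port} and read configuration from ${configFile}`);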

README.md (+4 -2)

@@ -1,8 +1,10 @@
 ## DuckDB (via MotherDuck) Connector
 
-The DuckDB Data Connector allows for connecting to a Motherduck hosted DuckDB database. This uses the [Typescript Data Connector SDK](https://github.com/hasura/ndc-sdk-typescript) and implements the [Data Connector Spec](https://github.com/hasura/ndc-spec).
+## TODO: Fix README
 
-In order to use this connector you will need Motherduck setup. This connector currently only supports querying.
+The DuckDB Data Connector allows for connecting to a MotherDuck-hosted DuckDB database, or a local DuckDB database file. This uses the [Typescript Data Connector SDK](https://github.com/hasura/ndc-sdk-typescript) and implements the [Data Connector Spec](https://github.com/hasura/ndc-spec).
+
+This connector currently only supports querying.
 
 ## Before you get started
 It is recommended that you:
Connector definition (new file, +12)

packagingDefinition:
  type: PrebuiltDockerImage
  dockerImage: ghcr.io/hasura/ndc-duckdb:v0.0.9
supportedEnvironmentVariables:
  - name: DUCKDB_URL
    description: The url for the DuckDB database
commands:
  update: docker run --rm -e DUCKDB_URL="$DUCKDB_URL" -v "$HASURA_PLUGIN_CONNECTOR_CONTEXT_PATH":/etc/connector ghcr.io/hasura/ndc-duckdb:v0.0.9 update
dockerComposeWatch:
  - path: ./
    target: /etc/connector
    action: sync+restart
Compose watch configuration (new file, +7)

services:
  connector:
    develop:
      watch:
        - path: ./
          target: /etc/connector
          action: sync+restart

entrypoint.sh (new file, +33)

#!/bin/bash

# Function to handle SIGTERM and SIGINT (graceful shutdown)
graceful_shutdown() {
  echo "Shutting down gracefully..."
  # Replace this with a command to gracefully shutdown your application, if necessary
  kill -SIGTERM "$pid"
  wait "$pid"
  exit 0
}

# Function to start the application
start_application() {
  # Start the application in the background and save its PID
  node ./dist/src/index.js serve &
  pid=$!
  # Trap SIGTERM and SIGINT to call the graceful_shutdown function
  trap 'graceful_shutdown' SIGTERM SIGINT
  # Wait for the application or a signal
  wait "$pid"
}

# Function to run the update process
run_update() {
  node ./dist/generate-config.js
}

# Dispatch on the first argument to the script
case "$1" in
  update)
    shift # Remove the first argument from the arguments list
    run_update "$@"
    ;;
  *)
    start_application "$@"
    ;;
esac
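
The entrypoint backgrounds node ./dist/src/index.js serve, traps SIGTERM and SIGINT, and forwards the signal to the Node process. For that forwarding to end in a clean exit, the Node side also has to react to the signal; a hedged sketch of what that could look like (the http.createServer stand-in is not the connector's real server, which comes from the SDK):

// shutdown-sketch.ts: stand-in server; real shutdown handling would live in the connector/SDK
import * as http from "http";

const server = http.createServer((_req, res) => res.end("ok"));
server.listen(8080);

for (const signal of ["SIGTERM", "SIGINT"] as const) {
  process.on(signal, () => {
    console.log(`Received ${signal}, shutting down gracefully...`);
    // Stop accepting new connections, let in-flight requests finish, then exit
    server.close(() => process.exit(0));
  });
}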

generate-config.ts (+3 -7)

@@ -4,9 +4,8 @@ import * as duckdb from 'duckdb';
 import * as fs from 'fs';
 import { promisify } from "util";
 const writeFile = promisify(fs.writeFile);
-const DEFAULT_URL = "md:?motherduck_token=ey...";
-const DEFAULT_OUTPUT_FILENAME = "configuration.json";
-const db = new duckdb.Database(DEFAULT_URL);
+const DUCKDB_URL = process.env["DUCKDB_URL"] as string;
+const db = new duckdb.Database(DUCKDB_URL);
 const con = db.connect();
 
 const determineType = (t: string): string => {

@@ -100,9 +99,6 @@ async function main() {
     }
   }
   const res: Configuration = {
-    credentials: {
-      url: DEFAULT_URL
-    },
     config: {
       collection_names: tableNames,
       collection_aliases: tableAliases,

@@ -111,7 +107,7 @@ async function main() {
       procedures: []
     }
   };
-  await writeFile(DEFAULT_OUTPUT_FILENAME, JSON.stringify(res));
+  await writeFile(`/etc/connector/config.json`, JSON.stringify(res));
 };
 
 main();
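
The diff drops the hard-coded DEFAULT_URL and output filename in favor of reading DUCKDB_URL from the environment and writing straight to /etc/connector/config.json. Note that process.env["DUCKDB_URL"] as string does not fail when the variable is unset; if stricter behavior were wanted, a guard along these lines could be used (this helper is an illustration, not part of the commit):

// env-guard-sketch.ts: hypothetical helper, not present in the commit
function requireEnv(name: string): string {
  const value = process.env[name];
  if (value === undefined || value.length === 0) {
    throw new Error(`Environment variable ${name} must be set`);
  }
  return value;
}

const duckdbUrl = requireEnv("DUCKDB_URL"); // e.g. a MotherDuck URL or a local DuckDB file path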

http_requests/CONFIGURATION_SCHEMA.http (-2, file deleted)

http_requests/DEFAULT_CONFIGURATION.http (-2, file deleted)

http_requests/REMOTE_CONFIGURATION.http (-8, file deleted)
