diff --git a/.changeset/hot-lions-smell.md b/.changeset/hot-lions-smell.md new file mode 100644 index 0000000000..750e5e2b19 --- /dev/null +++ b/.changeset/hot-lions-smell.md @@ -0,0 +1,6 @@ +--- +"@electric-sql/client": minor +"@electric-sql/react": minor +--- + +All `Shape` interfaces (`ShapeStream`, `Shape`, `useShape`) now require `table` as an additional configuration parameter, and the shape API endpoint url only needs to point to `/v1/shape`. diff --git a/.changeset/tender-pens-cheer.md b/.changeset/tender-pens-cheer.md new file mode 100644 index 0000000000..9a6e883e9f --- /dev/null +++ b/.changeset/tender-pens-cheer.md @@ -0,0 +1,5 @@ +--- +"@core/sync-service": minor +--- + +[BREAKING] All shape API endpoints now accept `table` as a query parameter rather than a path parameter, so `/v1/shape/foo?offset=-1` now becomes `/v1/shape?table=foo&offset=-1`. diff --git a/.gitignore b/.gitignore index ea28d94d74..95c3401388 100644 --- a/.gitignore +++ b/.gitignore @@ -19,3 +19,5 @@ shape-data.json test-dbs tsconfig.tsbuildinfo wal +shapes +.sst diff --git a/README.md b/README.md index b113b27fee..da78a56347 100644 --- a/README.md +++ b/README.md @@ -59,7 +59,7 @@ docker compose -f .support/docker-compose.yml up You can then use the [HTTP API](https://electric-sql.com/docs/api/http) to sync data from your Postgres. 
For example, to start syncing the whole `foo` table: ```sh -curl -i 'http://localhost:3000/v1/shape/foo?offset=-1' +curl -i 'http://localhost:3000/v1/shape?table=foo&offset=-1' ``` Or use one of the clients or integrations, such as the [`useShape`](https://electric-sql.com/docs/api/integrations/react) React hook: @@ -69,7 +69,8 @@ import { useShape } from '@electric-sql/react' function Component() { const { data } = useShape({ - url: `http://localhost:3000/v1/shape/foo`, + url: `http://localhost:3000/v1/shape`, + table: `foo`, where: `title LIKE 'foo%'`, }) diff --git a/examples/auth/app/page.tsx b/examples/auth/app/page.tsx index f0ee69a91a..f046b07aaa 100644 --- a/examples/auth/app/page.tsx +++ b/examples/auth/app/page.tsx @@ -33,16 +33,15 @@ const usersShape = (): ShapeStreamOptions => { const queryParams = new URLSearchParams(window.location.search) const org_id = queryParams.get(`org_id`) return { - url: new URL( - `/shape-proxy/users?org_id=${org_id}`, - window.location.origin - ).href, + url: new URL(`/shape-proxy?org_id=${org_id}`, window.location.origin) + .href, + table: `users`, fetchClient: fetchWrapper, } } else { return { - url: new URL(`https://not-sure-how-this-works.com/shape-proxy/items`) - .href, + url: new URL(`https://not-sure-how-this-works.com/shape-proxy`).href, + table: `items`, } } } diff --git a/examples/auth/app/shape-proxy/[...table]/route.ts b/examples/auth/app/shape-proxy/route.ts similarity index 88% rename from examples/auth/app/shape-proxy/[...table]/route.ts rename to examples/auth/app/shape-proxy/route.ts index 73bb4e2dfd..14fd6fdd1d 100644 --- a/examples/auth/app/shape-proxy/[...table]/route.ts +++ b/examples/auth/app/shape-proxy/route.ts @@ -1,12 +1,8 @@ -export async function GET( - request: Request, - { params }: { params: { table: string } } -) { +export async function GET(request: Request) { const url = new URL(request.url) - const { table } = params // Constuct the upstream URL - const originUrl = new 
URL(`http://localhost:3000/v1/shape/${table}`) + const originUrl = new URL(`http://localhost:3000/v1/shape`) url.searchParams.forEach((value, key) => { originUrl.searchParams.set(key, value) }) diff --git a/examples/bash-client/bash-client.bash b/examples/bash-client/bash-client.bash index e62af39fda..0a7045bfdc 100755 --- a/examples/bash-client/bash-client.bash +++ b/examples/bash-client/bash-client.bash @@ -1,7 +1,7 @@ #!/bin/bash # URL to download the JSON file from (without the output parameter) -BASE_URL="http://localhost:3000/v1/shape/todos" +BASE_URL="http://localhost:3000/v1/shape?table=todos" # Directory to store individual JSON files OFFSET_DIR="./json_files" @@ -78,7 +78,7 @@ process_json() { # Main loop to poll for updates every second while true; do - url="$BASE_URL?offset=$LATEST_OFFSET" + url="$BASE_URL&offset=$LATEST_OFFSET" echo $url LATEST_OFFSET=$(process_json "$url" "shape-data.json") diff --git a/examples/basic-example/src/Example.tsx b/examples/basic-example/src/Example.tsx index 4902444793..2f92992338 100644 --- a/examples/basic-example/src/Example.tsx +++ b/examples/basic-example/src/Example.tsx @@ -7,7 +7,8 @@ const baseUrl = import.meta.env.ELECTRIC_URL ?? 
`http://localhost:3000` export const Example = () => { const { data: items } = useShape({ - url: `${baseUrl}/v1/shape/items`, + url: `${baseUrl}/v1/shape`, + table: `items` }) /* diff --git a/examples/linearlite/.env.prod b/examples/linearlite/.env.prod new file mode 100644 index 0000000000..ae8b7f377f --- /dev/null +++ b/examples/linearlite/.env.prod @@ -0,0 +1 @@ +DATABASE_URL=postgresql://user:password@your-db-host/neondb?sslmode=require diff --git a/examples/linearlite/src/pages/Issue/Comments.tsx b/examples/linearlite/src/pages/Issue/Comments.tsx index 79175620c7..a7eea79dcf 100644 --- a/examples/linearlite/src/pages/Issue/Comments.tsx +++ b/examples/linearlite/src/pages/Issue/Comments.tsx @@ -17,7 +17,8 @@ export interface CommentsProps { function Comments(commentProps: CommentsProps) { const [newCommentBody, setNewCommentBody] = useState(``) const allComments = useShape({ - url: `${baseUrl}/v1/shape/comment`, + url: `${baseUrl}/v1/shape`, + table: `comment`, })!
as Comment[] const comments = allComments.data.filter( diff --git a/examples/linearlite/src/shapes.ts b/examples/linearlite/src/shapes.ts index fc4dabe149..7c11f22005 100644 --- a/examples/linearlite/src/shapes.ts +++ b/examples/linearlite/src/shapes.ts @@ -1,5 +1,6 @@ import { baseUrl } from './electric' export const issueShape = { - url: `${baseUrl}/v1/shape/issue`, + url: `${baseUrl}/v1/shape`, + table: `issue`, } diff --git a/examples/nextjs-example/app/page.tsx b/examples/nextjs-example/app/page.tsx index 51fe2e230a..239ebe2a7d 100644 --- a/examples/nextjs-example/app/page.tsx +++ b/examples/nextjs-example/app/page.tsx @@ -9,12 +9,13 @@ import { matchStream } from "./match-stream" const itemShape = () => { if (typeof window !== `undefined`) { return { - url: new URL(`/shape-proxy/items`, window?.location.origin).href, + url: new URL(`/shape-proxy`, window?.location.origin).href, + table: `items`, } } else { return { - url: new URL(`https://not-sure-how-this-works.com/shape-proxy/items`) - .href, + url: new URL(`https://not-sure-how-this-works.com/shape-proxy`).href, + table: `items`, } } } diff --git a/examples/nextjs-example/app/shape-proxy/[...table]/route.ts b/examples/nextjs-example/app/shape-proxy/route.ts similarity index 80% rename from examples/nextjs-example/app/shape-proxy/[...table]/route.ts rename to examples/nextjs-example/app/shape-proxy/route.ts index 54e3562d55..f542070d5b 100644 --- a/examples/nextjs-example/app/shape-proxy/[...table]/route.ts +++ b/examples/nextjs-example/app/shape-proxy/route.ts @@ -1,10 +1,6 @@ -export async function GET( - request: Request, - { params }: { params: { table: string } } -) { +export async function GET(request: Request) { const url = new URL(request.url) - const { table } = params - const originUrl = new URL(`http://localhost:3000/v1/shape/${table}`) + const originUrl = new URL(`http://localhost:3000/v1/shape`) url.searchParams.forEach((value, key) => { originUrl.searchParams.set(key, value) }) diff --git 
a/examples/redis-sync/src/index.ts b/examples/redis-sync/src/index.ts index a306eefef9..178e54d6b7 100644 --- a/examples/redis-sync/src/index.ts +++ b/examples/redis-sync/src/index.ts @@ -33,7 +33,8 @@ client.connect().then(async () => { const updateKeyScriptSha1 = await client.SCRIPT_LOAD(script) const itemsStream = new ShapeStream({ - url: `http://localhost:3000/v1/shape/items`, + url: `http://localhost:3000/v1/shape`, + table: `items`, }) itemsStream.subscribe(async (messages: Message[]) => { // Begin a Redis transaction diff --git a/examples/remix-basic/app/routes/_index.tsx b/examples/remix-basic/app/routes/_index.tsx index 80aaaf57bb..d2d708055a 100644 --- a/examples/remix-basic/app/routes/_index.tsx +++ b/examples/remix-basic/app/routes/_index.tsx @@ -7,7 +7,8 @@ import { matchStream } from "../match-stream" const itemShape = () => { return { - url: new URL(`/shape-proxy/items`, window.location.origin).href, + url: new URL(`/shape-proxy`, window.location.origin).href, + table: `items`, } } diff --git a/examples/remix-basic/app/routes/shape-proxy.$table.ts b/examples/remix-basic/app/routes/shape-proxy.ts similarity index 90% rename from examples/remix-basic/app/routes/shape-proxy.$table.ts rename to examples/remix-basic/app/routes/shape-proxy.ts index dd5d0526a7..1d618ee04e 100644 --- a/examples/remix-basic/app/routes/shape-proxy.$table.ts +++ b/examples/remix-basic/app/routes/shape-proxy.ts @@ -2,8 +2,7 @@ import type { LoaderFunctionArgs } from "@remix-run/node" export async function loader({ params, request }: LoaderFunctionArgs) { const url = new URL(request.url) - const { table } = params - const originUrl = new URL(`http://localhost:3000/v1/shape/${table}`) + const originUrl = new URL(`http://localhost:3000/v1/shape`) url.searchParams.forEach((value, key) => { originUrl.searchParams.set(key, value) }) diff --git a/examples/tanstack-example/src/Example.tsx b/examples/tanstack-example/src/Example.tsx index 3ae8f9156d..a5a790138a 100644 --- 
a/examples/tanstack-example/src/Example.tsx +++ b/examples/tanstack-example/src/Example.tsx @@ -14,7 +14,8 @@ const baseUrl = import.meta.env.ELECTRIC_URL ?? `http://localhost:3000` const baseApiUrl = `http://localhost:3001` const itemShape = () => ({ - url: new URL(`/v1/shape/items`, baseUrl).href, + url: new URL(`/v1/shape`, baseUrl).href, + table: `items` }) async function createItem(newId: string) { @@ -43,11 +44,11 @@ async function clearItems(numItems: number) { const findUpdatePromise = numItems > 0 ? matchStream({ - stream: itemsStream, - operations: [`delete`], - // First delete will match - matchFn: () => true, - }) + stream: itemsStream, + operations: [`delete`], + // First delete will match + matchFn: () => true, + }) : Promise.resolve() // Delete all items diff --git a/examples/todo-app/src/routes/index.tsx b/examples/todo-app/src/routes/index.tsx index 9c90fbac04..7acae6ad62 100644 --- a/examples/todo-app/src/routes/index.tsx +++ b/examples/todo-app/src/routes/index.tsx @@ -19,7 +19,8 @@ type ToDo = { export default function Index() { const { data: todos } = useShape({ - url: `http://localhost:3000/v1/shape/todos`, + url: `http://localhost:3000/v1/shape`, + table: `todos`, }) todos.sort((a, b) => a.created_at - b.created_at) console.log({ todos }) diff --git a/integration-tests/tests/crash-recovery.lux b/integration-tests/tests/crash-recovery.lux index 3349ded84f..6eae172add 100644 --- a/integration-tests/tests/crash-recovery.lux +++ b/integration-tests/tests/crash-recovery.lux @@ -40,7 +40,7 @@ # Initialize a shape and collect the offset [shell client] # strip ANSI codes from response for easier matching - !curl -v -X GET http://localhost:3000/v1/shape/items?offset=-1 + !curl -v -X GET "http://localhost:3000/v1/shape?table=items&offset=-1" ?electric-shape-id: ([\d-]+) [local shape_id=$1] ?electric-chunk-last-offset: ([\w\d_]+) @@ -58,7 +58,7 @@ # Client should be able to continue same shape [shell client] - !curl -v -X GET 
"http://localhost:3000/v1/shape/items?offset=$last_offset&shape_id=$shape_id" + !curl -v -X GET "http://localhost:3000/v1/shape?table=items&offset=$last_offset&shape_id=$shape_id" ??HTTP/1.1 200 OK [cleanup] diff --git a/integration-tests/tests/resuming-replication-at-consistent-point.lux b/integration-tests/tests/resuming-replication-at-consistent-point.lux index 3ccc822044..44851c59da 100644 --- a/integration-tests/tests/resuming-replication-at-consistent-point.lux +++ b/integration-tests/tests/resuming-replication-at-consistent-point.lux @@ -29,10 +29,10 @@ ## Initialize a couple of shapes so that Electric starts processing transactions from Postgres [shell client] - !curl -i http://localhost:3000/v1/shape/roots?offset=-1 + !curl -i "http://localhost:3000/v1/shape?table=roots&offset=-1" ??200 OK - !curl -i http://localhost:3000/v1/shape/leaves?offset=-1 + !curl -i "http://localhost:3000/v1/shape?table=leaves&offset=-1" ??200 OK ## Commit enough new transactions for shape storage to hit the simulated failure. 
diff --git a/packages/react-hooks/README.md b/packages/react-hooks/README.md index 29b1ec56c3..56a3fe941f 100644 --- a/packages/react-hooks/README.md +++ b/packages/react-hooks/README.md @@ -21,7 +21,8 @@ import { useShape } from "@electric-sql/react" export default function MyComponent () { const { isLoading, data } = useShape({ - url: "http://my-api.com/shape/foo", + url: "http://my-api.com/shape", + table: `foo`, }) if (isLoading) { diff --git a/packages/react-hooks/test/react-hooks.test-d.ts b/packages/react-hooks/test/react-hooks.test-d.ts index 6f7c934db8..c5a5ab5ec3 100644 --- a/packages/react-hooks/test/react-hooks.test-d.ts +++ b/packages/react-hooks/test/react-hooks.test-d.ts @@ -5,6 +5,7 @@ import { Row } from 'packages/typescript-client/dist' describe(`useShape`, () => { it(`should infer correct return type when no selector is provided`, () => { const shape = useShape({ + table: ``, url: ``, }) @@ -20,6 +21,7 @@ describe(`useShape`, () => { it(`should infer correct return type when a selector is provided`, () => { const shape = useShape({ + table: ``, url: ``, selector: (_value: UseShapeResult) => { return { @@ -36,6 +38,7 @@ describe(`useShape`, () => { it(`should raise a type error if type argument does not equal inferred return type`, () => { const shape = useShape({ + table: ``, url: ``, // @ts-expect-error - should have type mismatch, because doesn't match the declared `Number` type selector: (_value: UseShapeResult) => { diff --git a/packages/react-hooks/test/react-hooks.test.tsx b/packages/react-hooks/test/react-hooks.test.tsx index 84df2860b6..839773051a 100644 --- a/packages/react-hooks/test/react-hooks.test.tsx +++ b/packages/react-hooks/test/react-hooks.test.tsx @@ -12,12 +12,14 @@ describe(`sortedOptionsHash`, () => { `should create the same hash from options sorted in different ways`, () => { const hash1 = sortedOptionsHash({ - url: `http://whatever/foo`, + url: `http://whatever`, + table: `foo`, offset: `-1`, }) const hash2 = 
sortedOptionsHash({ offset: `-1`, - url: `http://whatever/foo`, + table: `foo`, + url: `http://whatever`, }) expect(hash1).toEqual(hash2) } @@ -28,7 +30,8 @@ describe(`useShape`, () => { it(`should sync an empty shape`, async ({ aborter, issuesTableUrl }) => { const { result } = renderHook(() => useShape({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter.signal, subscribe: false, }) @@ -49,7 +52,8 @@ describe(`useShape`, () => { const { result } = renderHook(() => useShape({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter?.signal, subscribe: false, }) @@ -63,7 +67,8 @@ describe(`useShape`, () => { it(`should expose isLoading status`, async ({ issuesTableUrl }) => { const { result } = renderHook(() => useShape({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, fetchClient: async (input, init) => { await sleep(10) return fetch(input, init) @@ -81,7 +86,8 @@ describe(`useShape`, () => { }) => { const { result } = renderHook(() => useShape({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, fetchClient: async (input, init) => { await sleep(50) return fetch(input, init) @@ -108,13 +114,15 @@ describe(`useShape`, () => { const { result } = renderHook(() => useShape({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter.signal, subscribe: true, }) ) await waitFor(() => expect(result.current.data).not.toEqual([])) + await sleep(100) // TODO: remove later, just testing if this improves flakes // Add an item. 
const [id2] = await insertIssues({ title: `other row` }) @@ -139,7 +147,8 @@ describe(`useShape`, () => { const { result, rerender } = renderHook((options) => useShape(options), { initialProps: { - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, where: `id = '${id}'`, signal: aborter.signal, subscribe: true, @@ -151,7 +160,8 @@ describe(`useShape`, () => { ) rerender({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, where: `id = '${id2}'`, signal: aborter.signal, subscribe: true, @@ -172,7 +182,8 @@ describe(`useShape`, () => { const { result } = renderHook(() => useShape({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter.signal, subscribe: true, selector: (result) => { @@ -218,7 +229,8 @@ describe(`useShape`, () => { const { result, rerender } = renderHook( ({ selector }) => useShape({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter.signal, subscribe: true, selector: selector, @@ -246,7 +258,8 @@ describe(`useShape`, () => { const { result, unmount } = renderHook(() => useShape({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter.signal, subscribe: true, }) @@ -261,7 +274,7 @@ describe(`useShape`, () => { // And wait until it's definitely seen await waitFor(async () => { const res = await fetch( - `${BASE_URL}/v1/shape/${issuesTableUrl}?offset=-1` + `${BASE_URL}/v1/shape?table=${issuesTableUrl}&offset=-1` ) const body = (await res.json()) as Message[] expect(body).toMatchObject([{}, { value: { id: newId } }]) diff --git a/packages/react-hooks/test/support/test-context.ts b/packages/react-hooks/test/support/test-context.ts index a3f3c44d2c..df22ce4d0d 100644 --- a/packages/react-hooks/test/support/test-context.ts +++ 
b/packages/react-hooks/test/support/test-context.ts @@ -38,7 +38,7 @@ export const testWithDbClient = test.extend<{ use(async (table: string, shapeId?: string) => { const baseUrl = inject(`baseUrl`) const resp = await fetch( - `${baseUrl}/v1/shape/${table}${shapeId ? `?shape_id=${shapeId}` : ``}`, + `${baseUrl}/v1/shape?table=${table}${shapeId ? `&shape_id=${shapeId}` : ``}`, { method: `DELETE`, } @@ -47,7 +47,7 @@ export const testWithDbClient = test.extend<{ console.error( await FetchError.fromResponse( resp, - `DELETE ${baseUrl}/v1/shape/${table}` + `DELETE ${baseUrl}/v1/shape?table=${table}` ) ) throw new Error(`Could not delete shape ${table} with ID ${shapeId}`) diff --git a/packages/sync-service/lib/electric/plug/delete_shape_plug.ex b/packages/sync-service/lib/electric/plug/delete_shape_plug.ex index 64ee3ca293..90a8df2768 100644 --- a/packages/sync-service/lib/electric/plug/delete_shape_plug.ex +++ b/packages/sync-service/lib/electric/plug/delete_shape_plug.ex @@ -26,7 +26,7 @@ defmodule Electric.Plug.DeleteShapePlug do defp validate_query_params(%Plug.Conn{} = conn, _) do all_params = Map.merge(conn.query_params, conn.path_params) - |> Map.take(["root_table", "shape_id"]) + |> Map.take(["table", "shape_id"]) |> Map.put("offset", "-1") case Params.validate(all_params, inspector: conn.assigns.config[:inspector]) do diff --git a/packages/sync-service/lib/electric/plug/label_process_plug.ex b/packages/sync-service/lib/electric/plug/label_process_plug.ex index 4e43046a44..badc92c576 100644 --- a/packages/sync-service/lib/electric/plug/label_process_plug.ex +++ b/packages/sync-service/lib/electric/plug/label_process_plug.ex @@ -26,19 +26,19 @@ defmodule Electric.Plug.LabelProcessPlug do iex> process_label(%{ ...> method: "GET", - ...> request_path: "/v1/shape/users", - ...> query_string: "offset=-1", + ...> request_path: "/v1/shape", + ...> query_string: "table=users&offset=-1", ...> assigns: %{plug_request_id: "F-jPUudNHxbD8lIAABQG"} ...> }) - "Request 
F-jPUudNHxbD8lIAABQG - GET /v1/shape/users?offset=-1" + "Request F-jPUudNHxbD8lIAABQG - GET /v1/shape?table=users&offset=-1" iex> process_label(%{ ...> method: "GET", - ...> request_path: "/v1/shape/users", - ...> query_string: "", + ...> request_path: "/v1/shape", + ...> query_string: "table=users", ...> assigns: %{plug_request_id: "F-jPUudNHxbD8lIAABQG"} ...> }) - "Request F-jPUudNHxbD8lIAABQG - GET /v1/shape/users" + "Request F-jPUudNHxbD8lIAABQG - GET /v1/shape?table=users" """ def process_label(conn) do "Request #{conn.assigns.plug_request_id} - #{conn.method} #{conn.request_path}#{query_suffix(conn)}" diff --git a/packages/sync-service/lib/electric/plug/router.ex b/packages/sync-service/lib/electric/plug/router.ex index fe0c664106..0d72030ab9 100644 --- a/packages/sync-service/lib/electric/plug/router.ex +++ b/packages/sync-service/lib/electric/plug/router.ex @@ -15,9 +15,9 @@ defmodule Electric.Plug.Router do match "/", via: [:get, :head], do: send_resp(conn, 200, "") - get "/v1/shape/:root_table", to: Electric.Plug.ServeShapePlug - delete "/v1/shape/:root_table", to: Electric.Plug.DeleteShapePlug - match "/v1/shape/:root_table", via: :options, to: Electric.Plug.OptionsShapePlug + get "/v1/shape", to: Electric.Plug.ServeShapePlug + delete "/v1/shape", to: Electric.Plug.DeleteShapePlug + match "/v1/shape", via: :options, to: Electric.Plug.OptionsShapePlug get "/v1/health", to: Electric.Plug.HealthCheckPlug @@ -27,7 +27,7 @@ defmodule Electric.Plug.Router do def server_header(conn, version), do: conn |> Plug.Conn.put_resp_header("server", "ElectricSQL/#{version}") - def put_cors_headers(%Plug.Conn{path_info: ["v1", "shape", _ | _]} = conn, _opts), + def put_cors_headers(%Plug.Conn{path_info: ["v1", "shape" | _]} = conn, _opts), do: CORSHeaderPlug.call(conn, %{methods: ["GET", "HEAD", "DELETE", "OPTIONS"]}) def put_cors_headers(conn, _opts), diff --git a/packages/sync-service/lib/electric/plug/serve_shape_plug.ex 
b/packages/sync-service/lib/electric/plug/serve_shape_plug.ex index 48fab9bdf6..90546bb0e7 100644 --- a/packages/sync-service/lib/electric/plug/serve_shape_plug.ex +++ b/packages/sync-service/lib/electric/plug/serve_shape_plug.ex @@ -60,7 +60,7 @@ defmodule Electric.Plug.ServeShapePlug do @primary_key false embedded_schema do - field(:root_table, :string) + field(:table, :string) field(:offset, :string) field(:shape_id, :string) field(:live, :boolean, default: false) @@ -74,7 +74,7 @@ defmodule Electric.Plug.ServeShapePlug do |> cast(params, __schema__(:fields) -- [:shape_definition], message: fn _, _ -> "must be %{type}" end ) - |> validate_required([:root_table, :offset]) + |> validate_required([:table, :offset]) |> cast_offset() |> cast_columns() |> validate_shape_id_with_offset() @@ -149,7 +149,7 @@ defmodule Electric.Plug.ServeShapePlug do end def cast_root_table(%Ecto.Changeset{} = changeset, opts) do - table = fetch_change!(changeset, :root_table) + table = fetch_change!(changeset, :table) where = fetch_field!(changeset, :where) columns = get_change(changeset, :columns, nil) @@ -263,7 +263,7 @@ defmodule Electric.Plug.ServeShapePlug do end defp handle_shape_info( - %Conn{assigns: %{shape_id: shape_id, config: config}} = conn, + %Conn{assigns: %{shape_id: shape_id, table: table, config: config}} = conn, {active_shape_id, _} ) do if Shapes.has_shape?(config, shape_id) do @@ -277,7 +277,7 @@ defmodule Electric.Plug.ServeShapePlug do else # The requested shape_id is not found, returns 409 along with a location redirect for clients to # re-request the shape from scratch with the new shape id which acts as a consistent cache buster - # e.g. GET /v1/shape/{root_table}?shape_id={new_shape_id}&offset=-1 + # e.g. 
GET /v1/shape?table={root_table}&shape_id={new_shape_id}&offset=-1 # TODO: discuss returning a 307 redirect rather than a 409, the client # will have to detect this and throw out old data @@ -285,7 +285,7 @@ defmodule Electric.Plug.ServeShapePlug do |> put_resp_header("electric-shape-id", active_shape_id) |> put_resp_header( "location", - "#{conn.request_path}?shape_id=#{active_shape_id}&offset=-1" + "#{conn.request_path}?table=#{table}&shape_id=#{active_shape_id}&offset=-1" ) |> send_resp(409, @must_refetch) |> halt() @@ -607,7 +607,7 @@ defmodule Electric.Plug.ServeShapePlug do %{ "shape.id" => shape_id, "shape.where" => assigns[:where], - "shape.root_table" => assigns[:root_table], + "shape.root_table" => assigns[:table], "shape.definition" => assigns[:shape_definition], "shape_req.is_live" => assigns[:live], "shape_req.offset" => assigns[:offset], diff --git a/packages/sync-service/lib/electric/plug/utils.ex b/packages/sync-service/lib/electric/plug/utils.ex index 6f7942020e..e8aa711784 100644 --- a/packages/sync-service/lib/electric/plug/utils.ex +++ b/packages/sync-service/lib/electric/plug/utils.ex @@ -47,6 +47,37 @@ defmodule Electric.Plug.Utils do end) end + @doc """ + Calculate the next interval that should be used for long polling based on the + current time and previous interval used. 
+ """ + @oct9th2024 DateTime.from_naive!(~N[2024-10-09 00:00:00], "Etc/UTC") + @spec seconds_since_oct9th_2024_next_interval(integer(), binary() | nil) :: integer() + def seconds_since_oct9th_2024_next_interval(long_poll_timeout_ms, prev_interval \\ nil) do + case div(long_poll_timeout_ms, 1000) do + 0 -> + 0 + + long_poll_timeout_sec -> + now = DateTime.utc_now() + + diff_in_seconds = DateTime.diff(now, @oct9th2024, :second) + next_interval = ceil(diff_in_seconds / long_poll_timeout_sec) * long_poll_timeout_sec + + # randomize the interval if previous one is the same + next_interval = + if prev_interval && "#{next_interval}" == prev_interval do + # Generate a random integer between 0 and 99999 + random_integer = :rand.uniform(100_000) + next_interval + random_integer + else + next_interval + end + + next_interval + end + end + defmodule CORSHeaderPlug do @behaviour Plug import Plug.Conn diff --git a/packages/sync-service/lib/electric/shapes/shape.ex b/packages/sync-service/lib/electric/shapes/shape.ex index 6bcdc513d3..084d080cc0 100644 --- a/packages/sync-service/lib/electric/shapes/shape.ex +++ b/packages/sync-service/lib/electric/shapes/shape.ex @@ -132,7 +132,7 @@ defmodule Electric.Shapes.Shape do defp load_column_info(table, inspector) do case Inspector.load_column_info(table, inspector) do :table_not_found -> - {:error, {:root_table, ["table not found"]}} + {:error, {:table, ["table not found"]}} {:ok, column_info} -> # %{["column_name"] => :type} @@ -154,13 +154,13 @@ defmodule Electric.Shapes.Shape do case Regex.run(~r/.+ relation "(?.+)" does not exist/, err, capture: :all_names) do [table_name] -> {:error, - {:root_table, + {:table, [ ~s|Table "#{table_name}" does not exist. 
If the table name contains capitals or special characters you must quote it.| ]}} _ -> - {:error, {:root_table, [err]}} + {:error, {:table, [err]}} end end end diff --git a/packages/sync-service/test/electric/plug/delete_shape_plug_test.exs b/packages/sync-service/test/electric/plug/delete_shape_plug_test.exs index 6e698eed13..5a9a90dfb9 100644 --- a/packages/sync-service/test/electric/plug/delete_shape_plug_test.exs +++ b/packages/sync-service/test/electric/plug/delete_shape_plug_test.exs @@ -56,7 +56,7 @@ defmodule Electric.Plug.DeleteShapePlugTest do describe "DeleteShapePlug" do test "returns 404 if shape deletion is not allowed" do conn = - conn("DELETE", "?root_table=.invalid_shape", false) + conn("DELETE", "?table=.invalid_shape", false) |> DeleteShapePlug.call([]) assert conn.status == 404 @@ -68,13 +68,13 @@ defmodule Electric.Plug.DeleteShapePlugTest do test "returns 400 for invalid params" do conn = - conn("DELETE", "?root_table=.invalid_shape") + conn("DELETE", "?table=.invalid_shape") |> DeleteShapePlug.call([]) assert conn.status == 400 assert Jason.decode!(conn.resp_body) == %{ - "root_table" => [ + "table" => [ "Invalid zero-length delimited identifier" ] } @@ -86,7 +86,7 @@ defmodule Electric.Plug.DeleteShapePlugTest do |> expect(:clean_shape, fn @test_shape_id, _ -> :ok end) conn = - conn(:delete, "?root_table=public.users") + conn(:delete, "?table=public.users") |> DeleteShapePlug.call([]) assert conn.status == 202 @@ -97,7 +97,7 @@ defmodule Electric.Plug.DeleteShapePlugTest do |> expect(:clean_shape, fn @test_shape_id, _ -> :ok end) conn = - conn(:delete, "?root_table=public.users&shape_id=#{@test_shape_id}") + conn(:delete, "?table=public.users&shape_id=#{@test_shape_id}") |> DeleteShapePlug.call([]) assert conn.status == 202 diff --git a/packages/sync-service/test/electric/plug/router_test.exs b/packages/sync-service/test/electric/plug/router_test.exs index 5ad4908b7d..4e1a7d5c74 100644 --- 
a/packages/sync-service/test/electric/plug/router_test.exs +++ b/packages/sync-service/test/electric/plug/router_test.exs @@ -72,7 +72,7 @@ defmodule Electric.Plug.RouterTest do ] test "GET returns a snapshot of initial data", %{opts: opts} do conn = - conn("GET", "/v1/shape/items?offset=-1") + conn("GET", "/v1/shape?table=items&offset=-1") |> Router.call(opts) assert %{status: 200} = conn @@ -92,13 +92,13 @@ defmodule Electric.Plug.RouterTest do test "GET returns an error when table is not found", %{opts: opts} do conn = - conn("GET", "/v1/shape/nonexistent?offset=-1") + conn("GET", "/v1/shape?table=nonexistent&offset=-1") |> Router.call(opts) assert %{status: 400} = conn assert %{ - "root_table" => [ + "table" => [ ~s|Table "nonexistent" does not exist. If the table name contains capitals or special characters you must quote it.| ] } = Jason.decode!(conn.resp_body) @@ -110,7 +110,7 @@ defmodule Electric.Plug.RouterTest do ] test "GET returns values in the snapshot and the rest of the log in the same format (as strings)", %{opts: opts, db_conn: db_conn} do - conn = conn("GET", "/v1/shape/items?offset=-1") |> Router.call(opts) + conn = conn("GET", "/v1/shape?table=items&offset=-1") |> Router.call(opts) assert [%{"value" => %{"num" => "1"}}] = Jason.decode!(conn.resp_body) Postgrex.query!( @@ -122,7 +122,8 @@ defmodule Electric.Plug.RouterTest do shape_id = get_resp_shape_id(conn) conn = - conn("GET", "/v1/shape/items?shape_id=#{shape_id}&offset=0_0&live") |> Router.call(opts) + conn("GET", "/v1/shape?table=items&shape_id=#{shape_id}&offset=0_0&live") + |> Router.call(opts) assert [%{"value" => %{"num" => "2"}}, _] = Jason.decode!(conn.resp_body) end @@ -133,7 +134,7 @@ defmodule Electric.Plug.RouterTest do test "DELETE forces the shape ID to be different on reconnect and new snapshot to be created", %{opts: opts, db_conn: db_conn} do conn = - conn("GET", "/v1/shape/items?offset=-1") + conn("GET", "/v1/shape?table=items&offset=-1") |> Router.call(opts) assert 
%{status: 200} = conn @@ -143,14 +144,14 @@ defmodule Electric.Plug.RouterTest do Jason.decode!(conn.resp_body) assert %{status: 202} = - conn("DELETE", "/v1/shape/items?shape_id=#{shape1_id}") + conn("DELETE", "/v1/shape?table=items&shape_id=#{shape1_id}") |> Router.call(opts) Postgrex.query!(db_conn, "DELETE FROM items", []) Postgrex.query!(db_conn, "INSERT INTO items VALUES (gen_random_uuid(), 'test value 2')", []) conn = - conn("GET", "/v1/shape/items?offset=-1") + conn("GET", "/v1/shape?table=items&offset=-1") |> Router.call(opts) assert %{status: 200} = conn @@ -171,7 +172,7 @@ defmodule Electric.Plug.RouterTest do } do # Request a snapshot conn = - conn("GET", "/v1/shape/foo?offset=-1") + conn("GET", "/v1/shape?table=foo&offset=-1") |> Router.call(opts) assert %{status: 200} = conn @@ -200,7 +201,7 @@ defmodule Electric.Plug.RouterTest do task = Task.async(fn -> - conn("GET", "/v1/shape/foo?offset=#{@first_offset}&shape_id=#{shape_id}&live") + conn("GET", "/v1/shape?table=foo&offset=#{@first_offset}&shape_id=#{shape_id}&live") |> Router.call(opts) end) @@ -243,7 +244,7 @@ defmodule Electric.Plug.RouterTest do "INSERT INTO wide_table VALUES (1, 'test value 1', 'test value 1', 'test value 1')" ] test "GET received only a diff when receiving updates", %{opts: opts, db_conn: db_conn} do - conn = conn("GET", "/v1/shape/wide_table?offset=-1") |> Router.call(opts) + conn = conn("GET", "/v1/shape?table=wide_table&offset=-1") |> Router.call(opts) assert %{status: 200} = conn shape_id = get_resp_shape_id(conn) @@ -256,7 +257,7 @@ defmodule Electric.Plug.RouterTest do task = Task.async(fn -> - conn("GET", "/v1/shape/wide_table?offset=0_0&shape_id=#{shape_id}&live") + conn("GET", "/v1/shape?table=wide_table&offset=0_0&shape_id=#{shape_id}&live") |> Router.call(opts) end) @@ -277,7 +278,7 @@ defmodule Electric.Plug.RouterTest do opts: opts, db_conn: db_conn } do - conn = conn("GET", "/v1/shape/wide_table?offset=-1") |> Router.call(opts) + conn = conn("GET", 
"/v1/shape?table=wide_table&offset=-1") |> Router.call(opts) assert %{status: 200} = conn shape_id = get_resp_shape_id(conn) @@ -290,7 +291,7 @@ defmodule Electric.Plug.RouterTest do task = Task.async(fn -> - conn("GET", "/v1/shape/wide_table?offset=0_0&shape_id=#{shape_id}&live") + conn("GET", "/v1/shape?table=wide_table&offset=0_0&shape_id=#{shape_id}&live") |> Router.call(opts) end) @@ -340,7 +341,7 @@ defmodule Electric.Plug.RouterTest do ] test "GET works correctly when table has no PK", %{opts: opts, db_conn: db_conn} do - conn = conn("GET", "/v1/shape/test_table?offset=-1") |> Router.call(opts) + conn = conn("GET", "/v1/shape?table=test_table&offset=-1") |> Router.call(opts) assert %{status: 200} = conn shape_id = get_resp_shape_id(conn) @@ -349,7 +350,7 @@ defmodule Electric.Plug.RouterTest do task = Task.async(fn -> - conn("GET", "/v1/shape/test_table?offset=0_0&shape_id=#{shape_id}&live") + conn("GET", "/v1/shape?table=test_table&offset=0_0&shape_id=#{shape_id}&live") |> Router.call(opts) end) @@ -390,7 +391,9 @@ defmodule Electric.Plug.RouterTest do "INSERT INTO wide_table VALUES (1, 'test value 1', 'test value 1', 'test value 1')" ] test "GET receives only specified columns out of wide table", %{opts: opts, db_conn: db_conn} do - conn = conn("GET", "/v1/shape/wide_table?offset=-1&columns=id,value1") |> Router.call(opts) + conn = + conn("GET", "/v1/shape?table=wide_table&offset=-1&columns=id,value1") |> Router.call(opts) + assert %{status: 200} = conn shape_id = get_resp_shape_id(conn) @@ -408,7 +411,7 @@ defmodule Electric.Plug.RouterTest do Task.async(fn -> conn( "GET", - "/v1/shape/wide_table?offset=#{next_offset}&columns=id,value1&shape_id=#{shape_id}&live" + "/v1/shape?table=wide_table&offset=#{next_offset}&columns=id,value1&shape_id=#{shape_id}&live" ) |> Router.call(opts) |> then(fn conn -> @@ -437,7 +440,7 @@ defmodule Electric.Plug.RouterTest do where = "value ILIKE 'yes%'" conn = - conn("GET", "/v1/shape/items", %{offset: "-1", where: where}) + 
conn("GET", "/v1/shape?table=items", %{offset: "-1", where: where}) |> Router.call(opts) assert %{status: 200} = conn @@ -447,7 +450,7 @@ defmodule Electric.Plug.RouterTest do task = Task.async(fn -> - conn("GET", "/v1/shape/items", %{ + conn("GET", "/v1/shape?table=items", %{ offset: "0_0", shape_id: shape_id, where: where, @@ -469,7 +472,7 @@ defmodule Electric.Plug.RouterTest do assert %{status: 200} = conn = - conn("GET", "/v1/shape/items", %{ + conn("GET", "/v1/shape?table=items", %{ offset: new_offset, shape_id: shape_id, where: where @@ -489,7 +492,7 @@ defmodule Electric.Plug.RouterTest do # Verify that a single row is in-shape initially. conn = - conn("GET", "/v1/shape/serial_ids", %{offset: "-1", where: where}) + conn("GET", "/v1/shape?table=serial_ids", %{offset: "-1", where: where}) |> Router.call(opts) assert %{status: 200} = conn @@ -508,7 +511,7 @@ defmodule Electric.Plug.RouterTest do task = Task.async(fn -> - conn("GET", "/v1/shape/serial_ids", %{ + conn("GET", "/v1/shape?table=serial_ids", %{ offset: "0_0", shape_id: shape_id, where: where, @@ -538,7 +541,7 @@ defmodule Electric.Plug.RouterTest do # DELETE operations, respectively. task = Task.async(fn -> - conn("GET", "/v1/shape/serial_ids", %{ + conn("GET", "/v1/shape?table=serial_ids", %{ offset: new_offset, shape_id: shape_id, where: where, @@ -597,7 +600,7 @@ defmodule Electric.Plug.RouterTest do # Verify that a two rows are in-shape initially. conn = - conn("GET", "/v1/shape/serial_ids", %{offset: "-1", where: where}) + conn("GET", "/v1/shape?table=serial_ids", %{offset: "-1", where: where}) |> Router.call(opts) assert %{status: 200} = conn @@ -622,7 +625,7 @@ defmodule Electric.Plug.RouterTest do # Simulate a move-in and a move-out by changing the PK of some rows. 
task = Task.async(fn -> - conn("GET", "/v1/shape/serial_ids", %{ + conn("GET", "/v1/shape?table=serial_ids", %{ offset: "0_0", shape_id: shape_id, where: where, @@ -684,7 +687,7 @@ defmodule Electric.Plug.RouterTest do second_val = String.duplicate("b", round(threshold * 0.7)) third_val = String.duplicate("c", round(threshold * 0.4)) - conn = conn("GET", "/v1/shape/large_rows_table?offset=-1") |> Router.call(opts) + conn = conn("GET", "/v1/shape?table=large_rows_table&offset=-1") |> Router.call(opts) assert %{status: 200} = conn [shape_id] = Plug.Conn.get_resp_header(conn, "electric-shape-id") [next_offset] = Plug.Conn.get_resp_header(conn, "electric-chunk-last-offset") @@ -696,7 +699,7 @@ defmodule Electric.Plug.RouterTest do Task.async(fn -> conn( "GET", - "/v1/shape/large_rows_table?offset=#{next_offset}&shape_id=#{shape_id}&live" + "/v1/shape?table=large_rows_table&offset=#{next_offset}&shape_id=#{shape_id}&live" ) |> Router.call(opts) end) @@ -710,7 +713,7 @@ defmodule Electric.Plug.RouterTest do assert %{status: 200} = Task.await(task) conn = - conn("GET", "/v1/shape/large_rows_table?offset=#{next_offset}&shape_id=#{shape_id}") + conn("GET", "/v1/shape?table=large_rows_table&offset=#{next_offset}&shape_id=#{shape_id}") |> Router.call(opts) assert %{status: 200} = conn @@ -731,7 +734,7 @@ defmodule Electric.Plug.RouterTest do [next_offset] = Plug.Conn.get_resp_header(conn, "electric-chunk-last-offset") conn = - conn("GET", "/v1/shape/large_rows_table?offset=#{next_offset}&shape_id=#{shape_id}") + conn("GET", "/v1/shape?table=large_rows_table&offset=#{next_offset}&shape_id=#{shape_id}") |> Router.call(opts) assert %{status: 200} = conn @@ -756,7 +759,7 @@ defmodule Electric.Plug.RouterTest do # Initial shape request # forces the shape to be created conn = - conn("GET", "/v1/shape/items", %{offset: "-1", where: where}) + conn("GET", "/v1/shape?table=items", %{offset: "-1", where: where}) |> Router.call(opts) assert %{status: 200} = conn @@ -767,7 +770,7 @@ 
defmodule Electric.Plug.RouterTest do # Make the next request but forget to include the where clause conn = - conn("GET", "/v1/shape/items", %{offset: next_offset, shape_id: shape_id}) + conn("GET", "/v1/shape?table=items", %{offset: next_offset, shape_id: shape_id}) |> Router.call(opts) assert %{status: 400} = conn @@ -780,7 +783,7 @@ defmodule Electric.Plug.RouterTest do } do # Make the next request but forget to include the where clause conn = - conn("GET", "/v1/shape/items", %{offset: "0_0", shape_id: "nonexistent"}) + conn("GET", "/v1/shape?table=items", %{offset: "0_0", shape_id: "nonexistent"}) |> Router.call(opts) assert %{status: 409} = conn @@ -788,7 +791,7 @@ defmodule Electric.Plug.RouterTest do new_shape_id = get_resp_header(conn, "electric-shape-id") assert get_resp_header(conn, "location") == - "/v1/shape/items?shape_id=#{new_shape_id}&offset=-1" + "/v1/shape?table=items&shape_id=#{new_shape_id}&offset=-1" end test "GET receives 409 when shape ID is not found but there is another shape matching the definition", @@ -800,7 +803,7 @@ defmodule Electric.Plug.RouterTest do # Initial shape request # forces the shape to be created conn = - conn("GET", "/v1/shape/items", %{offset: "-1", where: where}) + conn("GET", "/v1/shape?table=items", %{offset: "-1", where: where}) |> Router.call(opts) assert %{status: 200} = conn @@ -810,7 +813,11 @@ defmodule Electric.Plug.RouterTest do # Request the same shape definition but with invalid shape_id conn = - conn("GET", "/v1/shape/items", %{offset: "0_0", shape_id: "nonexistent", where: where}) + conn("GET", "/v1/shape?table=items", %{ + offset: "0_0", + shape_id: "nonexistent", + where: where + }) |> Router.call(opts) assert %{status: 409} = conn @@ -822,7 +829,7 @@ defmodule Electric.Plug.RouterTest do ] test "HEAD receives all headers", %{opts: opts} do conn_res = - conn("GET", "/v1/shape/items?offset=-1") + conn("GET", "/v1/shape?table=items&offset=-1") |> Router.call(opts) assert %{status: 200} = conn_res @@ -833,7 
+840,7 @@ defmodule Electric.Plug.RouterTest do |> Enum.filter(&(Kernel.elem(&1, 0) != "x-request-id")) conn = - conn("HEAD", "/v1/shape/items?offset=-1") + conn("HEAD", "/v1/shape?table=items&offset=-1") |> Router.call(opts) assert %{status: 200} = conn @@ -848,7 +855,7 @@ defmodule Electric.Plug.RouterTest do test "OPTIONS receives supported methods", %{opts: opts} do conn = - conn("OPTIONS", "/v1/shape/items") + conn("OPTIONS", "/v1/shape?table=items") |> Router.call(opts) assert %{status: 204} = conn diff --git a/packages/sync-service/test/electric/plug/serve_shape_plug_test.exs b/packages/sync-service/test/electric/plug/serve_shape_plug_test.exs index 0341ba7c26..0e538583ef 100644 --- a/packages/sync-service/test/electric/plug/serve_shape_plug_test.exs +++ b/packages/sync-service/test/electric/plug/serve_shape_plug_test.exs @@ -68,75 +68,16 @@ defmodule Electric.Plug.ServeShapePlugTest do end describe "ServeShapePlug" do - test "seconds_since_oct9th_2024_next_interval" do - # Mock the conn struct with assigns - # 20 seconds - conn = %Plug.Conn{ - assigns: %{config: %{long_poll_timeout: 20000}}, - query_params: %{"cursor" => nil} - } - - # Calculate the expected next interval - now = DateTime.utc_now() - oct9th2024 = DateTime.from_naive!(~N[2024-10-09 00:00:00], "Etc/UTC") - diff_in_seconds = DateTime.diff(now, oct9th2024, :second) - expected_interval = ceil(diff_in_seconds / 20) * 20 - - # Assert that the function returns the expected value - assert Electric.Plug.ServeShapePlug.TimeUtils.seconds_since_oct9th_2024_next_interval(conn) == - expected_interval - end - - test "seconds_since_oct9th_2024_next_interval with different timeout" do - # Mock the conn struct with a different timeout - # 30 seconds - conn = %Plug.Conn{ - assigns: %{config: %{long_poll_timeout: 30000}}, - query_params: %{"cursor" => nil} - } - - # Calculate the expected next interval - now = DateTime.utc_now() - oct9th2024 = DateTime.from_naive!(~N[2024-10-09 00:00:00], "Etc/UTC") - 
diff_in_seconds = DateTime.diff(now, oct9th2024, :second) - expected_interval = ceil(diff_in_seconds / 30) * 30 - - # Assert that the function returns the expected value - assert Electric.Plug.ServeShapePlug.TimeUtils.seconds_since_oct9th_2024_next_interval(conn) == - expected_interval - end - - test "seconds_since_oct9th_2024_next_interval with different timeout and cursor collision" do - # Mock the conn struct with a different timeout (30 seconds) - conn = %Plug.Conn{ - assigns: %{config: %{long_poll_timeout: 30000}}, - query_params: %{"cursor" => nil} - } - - # Calculate the expected next interval - now = DateTime.utc_now() - oct9th2024 = DateTime.from_naive!(~N[2024-10-09 00:00:00], "Etc/UTC") - diff_in_seconds = DateTime.diff(now, oct9th2024, :second) - expected_interval = ceil(diff_in_seconds / 30) * 30 - - # Simulate a cursor collision - conn = %{conn | query_params: %{"cursor" => "#{expected_interval}"}} - - # Assert that the function returns a DIFFERENT value due to collision - assert Electric.Plug.ServeShapePlug.TimeUtils.seconds_since_oct9th_2024_next_interval(conn) != - expected_interval - end - test "returns 400 for invalid params" do conn = - conn(:get, %{"root_table" => ".invalid_shape"}, "?offset=invalid") + conn(:get, %{"table" => ".invalid_shape"}, "?offset=invalid") |> ServeShapePlug.call([]) assert conn.status == 400 assert Jason.decode!(conn.resp_body) == %{ "offset" => ["has invalid format"], - "root_table" => [ + "table" => [ "Invalid zero-length delimited identifier" ] } @@ -146,19 +87,19 @@ defmodule Electric.Plug.ServeShapePlugTest do # this will pass table name validation # but will fail to find the table conn = - conn(:get, %{"root_table" => "_val1d_schëmaΦ$.Φtàble"}, "?offset=-1") + conn(:get, %{"table" => "_val1d_schëmaΦ$.Φtàble"}, "?offset=-1") |> ServeShapePlug.call([]) assert conn.status == 400 assert Jason.decode!(conn.resp_body) == %{ - "root_table" => ["table not found"] + "table" => ["table not found"] } end test "returns 400 
for missing shape_id when offset != -1" do conn = - conn(:get, %{"root_table" => "public.users"}, "?offset=#{LogOffset.first()}") + conn(:get, %{"table" => "public.users"}, "?offset=#{LogOffset.first()}") |> ServeShapePlug.call([]) assert conn.status == 400 @@ -172,7 +113,7 @@ defmodule Electric.Plug.ServeShapePlugTest do conn = conn( :get, - %{"root_table" => "public.users"}, + %{"table" => "public.users"}, "?offset=#{LogOffset.before_all()}&live=true" ) |> ServeShapePlug.call([]) @@ -207,7 +148,7 @@ defmodule Electric.Plug.ServeShapePlugTest do end) conn = - conn(:get, %{"root_table" => "public.users"}, "?offset=-1") + conn(:get, %{"table" => "public.users"}, "?offset=-1") |> ServeShapePlug.call([]) assert conn.status == 200 @@ -255,7 +196,7 @@ defmodule Electric.Plug.ServeShapePlugTest do stale_age = 312 conn = - conn(:get, %{"root_table" => "public.users"}, "?offset=-1") + conn(:get, %{"table" => "public.users"}, "?offset=-1") |> put_in_config(:max_age, max_age) |> put_in_config(:stale_age, stale_age) |> ServeShapePlug.call([]) @@ -290,7 +231,7 @@ defmodule Electric.Plug.ServeShapePlugTest do end) conn = - conn(:get, %{"root_table" => "public.users"}, "?offset=-1") + conn(:get, %{"table" => "public.users"}, "?offset=-1") |> ServeShapePlug.call([]) assert Plug.Conn.get_resp_header(conn, "electric-schema") == [ @@ -323,7 +264,7 @@ defmodule Electric.Plug.ServeShapePlugTest do conn = conn( :get, - %{"root_table" => "public.users"}, + %{"table" => "public.users"}, "?offset=#{@start_offset_50}&shape_id=#{@test_shape_id}" ) |> ServeShapePlug.call([]) @@ -374,7 +315,7 @@ defmodule Electric.Plug.ServeShapePlugTest do conn = conn( :get, - %{"root_table" => "public.users"}, + %{"table" => "public.users"}, "?offset=#{@start_offset_50}&shape_id=#{@test_shape_id}" ) |> put_req_header( @@ -415,7 +356,7 @@ defmodule Electric.Plug.ServeShapePlugTest do Task.async(fn -> conn( :get, - %{"root_table" => "public.users"}, + %{"table" => "public.users"}, 
"?offset=#{@test_offset}&shape_id=#{@test_shape_id}&live=true" ) |> ServeShapePlug.call([]) @@ -472,7 +413,7 @@ defmodule Electric.Plug.ServeShapePlugTest do Task.async(fn -> conn( :get, - %{"root_table" => "public.users"}, + %{"table" => "public.users"}, "?offset=#{@test_offset}&shape_id=#{@test_shape_id}&live=true" ) |> ServeShapePlug.call([]) @@ -516,7 +457,7 @@ defmodule Electric.Plug.ServeShapePlugTest do conn = conn( :get, - %{"root_table" => "public.users"}, + %{"table" => "public.users"}, "?offset=#{@test_offset}&shape_id=#{@test_shape_id}&live=true" ) |> put_in_config(:long_poll_timeout, 100) @@ -546,7 +487,7 @@ defmodule Electric.Plug.ServeShapePlugTest do conn = conn( :get, - %{"root_table" => "public.users"}, + %{"table" => "public.users"}, "?offset=#{"50_12"}&shape_id=foo" ) |> ServeShapePlug.call([]) @@ -555,7 +496,10 @@ defmodule Electric.Plug.ServeShapePlugTest do assert Jason.decode!(conn.resp_body) == [%{"headers" => %{"control" => "must-refetch"}}] assert get_resp_header(conn, "electric-shape-id") == [@test_shape_id] - assert get_resp_header(conn, "location") == ["/?shape_id=#{@test_shape_id}&offset=-1"] + + assert get_resp_header(conn, "location") == [ + "/?table=public.users&shape_id=#{@test_shape_id}&offset=-1" + ] end test "creates a new shape when shape ID does not exist and sends a 409 redirecting to the newly created shape" do @@ -574,7 +518,7 @@ defmodule Electric.Plug.ServeShapePlugTest do conn = conn( :get, - %{"root_table" => "public.users"}, + %{"table" => "public.users"}, "?offset=#{"50_12"}&shape_id=#{@test_shape_id}" ) |> ServeShapePlug.call([]) @@ -583,7 +527,10 @@ defmodule Electric.Plug.ServeShapePlugTest do assert Jason.decode!(conn.resp_body) == [%{"headers" => %{"control" => "must-refetch"}}] assert get_resp_header(conn, "electric-shape-id") == [new_shape_id] - assert get_resp_header(conn, "location") == ["/?shape_id=#{new_shape_id}&offset=-1"] + + assert get_resp_header(conn, "location") == [ + 
"/?table=public.users&shape_id=#{new_shape_id}&offset=-1" + ] end test "sends 400 when shape ID does not match shape definition" do @@ -597,7 +544,7 @@ defmodule Electric.Plug.ServeShapePlugTest do conn = conn( :get, - %{"root_table" => "public.users"}, + %{"table" => "public.users"}, "?offset=#{"50_12"}&shape_id=#{@test_shape_id}" ) |> ServeShapePlug.call([]) @@ -610,7 +557,7 @@ defmodule Electric.Plug.ServeShapePlugTest do conn = conn( :get, - %{"root_table" => "public.users", "columns" => "value"}, + %{"table" => "public.users", "columns" => "value"}, "?offset=-1" ) |> ServeShapePlug.call([]) @@ -626,7 +573,7 @@ defmodule Electric.Plug.ServeShapePlugTest do conn = conn( :get, - %{"root_table" => "public.users", "columns" => "id,invalid"}, + %{"table" => "public.users", "columns" => "id,invalid"}, "?offset=-1" ) |> ServeShapePlug.call([]) diff --git a/packages/sync-service/test/electric/plug/utils_test.exs b/packages/sync-service/test/electric/plug/utils_test.exs index e79d347485..5ca1d67d7b 100644 --- a/packages/sync-service/test/electric/plug/utils_test.exs +++ b/packages/sync-service/test/electric/plug/utils_test.exs @@ -2,4 +2,52 @@ defmodule Electric.Plug.UtilsTest do alias Electric.Plug.Utils use ExUnit.Case, async: true doctest Utils, import: true + + describe "seconds_since_oct9th_2024_next_interval/2" do + test "returns expected interval" do + long_poll_timeout_ms = 20000 + long_poll_timeout_sec = div(long_poll_timeout_ms, 1000) + # Calculate the expected next interval + now = DateTime.utc_now() + oct9th2024 = DateTime.from_naive!(~N[2024-10-09 00:00:00], "Etc/UTC") + diff_in_seconds = DateTime.diff(now, oct9th2024, :second) + expected_interval = ceil(diff_in_seconds / long_poll_timeout_sec) * long_poll_timeout_sec + + # Assert that the function returns the expected value + assert Utils.seconds_since_oct9th_2024_next_interval(long_poll_timeout_ms) == + expected_interval + end + + test "returns expected inteval with different timeout" do + 
long_poll_timeout_ms = 30000 + long_poll_timeout_sec = div(long_poll_timeout_ms, 1000) + + # Calculate the expected next interval + now = DateTime.utc_now() + oct9th2024 = DateTime.from_naive!(~N[2024-10-09 00:00:00], "Etc/UTC") + diff_in_seconds = DateTime.diff(now, oct9th2024, :second) + expected_interval = ceil(diff_in_seconds / long_poll_timeout_sec) * long_poll_timeout_sec + + # Assert that the function returns the expected value + assert Utils.seconds_since_oct9th_2024_next_interval(long_poll_timeout_ms) == + expected_interval + end + + test "returns expected interval with different timeout and cursor collision" do + long_poll_timeout_ms = 30000 + long_poll_timeout_sec = div(long_poll_timeout_ms, 1000) + + # Calculate the expected next interval + now = DateTime.utc_now() + oct9th2024 = DateTime.from_naive!(~N[2024-10-09 00:00:00], "Etc/UTC") + diff_in_seconds = DateTime.diff(now, oct9th2024, :second) + expected_interval = ceil(diff_in_seconds / long_poll_timeout_sec) * long_poll_timeout_sec + + # Assert that the function returns a DIFFERENT value due to collision + assert Utils.seconds_since_oct9th_2024_next_interval( + long_poll_timeout_ms, + "#{expected_interval}" + ) != expected_interval + end + end end diff --git a/packages/sync-service/test/electric/shapes/shape_test.exs b/packages/sync-service/test/electric/shapes/shape_test.exs index 2db3e5f01f..7fd3a13385 100644 --- a/packages/sync-service/test/electric/shapes/shape_test.exs +++ b/packages/sync-service/test/electric/shapes/shape_test.exs @@ -285,14 +285,14 @@ defmodule Electric.Shapes.ShapeTest do end test "errors on empty table name", %{inspector: inspector} do - {:error, {:root_table, ["Invalid zero-length delimited identifier"]}} = + {:error, {:table, ["Invalid zero-length delimited identifier"]}} = Shape.new("", inspector: inspector) end test "errors when the table doesn't exist", %{inspector: inspector} do {:error, { - :root_table, + :table, [ ~S|Table "nonexistent" does not exist. 
If the table name contains capitals or special characters you must quote it.| ] diff --git a/packages/typescript-client/README.md b/packages/typescript-client/README.md index 3cc59033e7..ac42d2f184 100644 --- a/packages/typescript-client/README.md +++ b/packages/typescript-client/README.md @@ -50,7 +50,8 @@ import { ShapeStream } from '@electric-sql/client' // Passes subscribers rows as they're inserted, updated, or deleted const stream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/foo`, + url: `${BASE_URL}/v1/shape`, + table: `foo`, }) stream.subscribe(messages => { @@ -66,7 +67,8 @@ stream.subscribe(messages => { import { ShapeStream, Shape } from '@electric-sql/client' const stream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/foo`, + url: `${BASE_URL}/v1/shape`, + table: `foo`, }) const shape = new Shape(stream) diff --git a/packages/typescript-client/src/client.ts b/packages/typescript-client/src/client.ts index 1434d07346..0fad65f0fa 100644 --- a/packages/typescript-client/src/client.ts +++ b/packages/typescript-client/src/client.ts @@ -26,6 +26,7 @@ import { SHAPE_ID_QUERY_PARAM, SHAPE_SCHEMA_HEADER, WHERE_QUERY_PARAM, + TABLE_QUERY_PARAM, } from './constants' /** @@ -33,10 +34,16 @@ import { */ export interface ShapeStreamOptions { /** - * The full URL to where the Shape is hosted. This can either be the Electric server - * directly or a proxy. E.g. for a local Electric instance, you might set `http://localhost:3000/v1/shape/foo` + * The full URL to where the Shape is served. This can either be the Electric server + * directly or a proxy. E.g. for a local Electric instance, you might set `http://localhost:3000/v1/shape` */ url: string + + /** + * The root table for the shape. + */ + table: string + /** * The where clauses for the shape. 
*/ @@ -196,7 +203,7 @@ export class ShapeStream = Row> async start() { this.#isUpToDate = false - const { url, where, columns, signal } = this.options + const { url, table, where, columns, signal } = this.options try { while ( @@ -204,6 +211,7 @@ export class ShapeStream = Row> this.options.subscribe ) { const fetchUrl = new URL(url) + fetchUrl.searchParams.set(TABLE_QUERY_PARAM, table) if (where) fetchUrl.searchParams.set(WHERE_QUERY_PARAM, where) if (columns && columns.length > 0) fetchUrl.searchParams.set(COLUMNS_QUERY_PARAM, columns.join(`,`)) @@ -409,7 +417,10 @@ export class ShapeStream = Row> function validateOptions(options: Partial>): void { if (!options.url) { - throw new Error(`Invalid shape option. It must provide the url`) + throw new Error(`Invalid shape options. It must provide the url`) + } + if (!options.table) { + throw new Error(`Invalid shape options. It must provide the table`) } if (options.signal && !(options.signal instanceof AbortSignal)) { throw new Error( diff --git a/packages/typescript-client/src/constants.ts b/packages/typescript-client/src/constants.ts index c2ba435eab..39c4067770 100644 --- a/packages/typescript-client/src/constants.ts +++ b/packages/typescript-client/src/constants.ts @@ -8,4 +8,5 @@ export const SHAPE_ID_QUERY_PARAM = `shape_id` export const OFFSET_QUERY_PARAM = `offset` export const WHERE_QUERY_PARAM = `where` export const COLUMNS_QUERY_PARAM = `columns` +export const TABLE_QUERY_PARAM = `table` export const LIVE_QUERY_PARAM = `live` diff --git a/packages/typescript-client/src/shape.ts b/packages/typescript-client/src/shape.ts index 74bdd5b783..2ca705fd18 100644 --- a/packages/typescript-client/src/shape.ts +++ b/packages/typescript-client/src/shape.ts @@ -20,7 +20,7 @@ export type ShapeChangedCallback = Row> = ( * @param {ShapeStream} - the underlying shape stream * @example * ``` - * const shapeStream = new ShapeStream<{ foo: number }>(url: 'http://localhost:3000/v1/shape/foo'}) + * const shapeStream = new 
ShapeStream<{ foo: number }>(url: `http://localhost:3000/v1/shape`, table: `foo`}) * const shape = new Shape(shapeStream) * ``` * diff --git a/packages/typescript-client/test/cache.test.ts b/packages/typescript-client/test/cache.test.ts index 49d348805a..9db79c2f1a 100644 --- a/packages/typescript-client/test/cache.test.ts +++ b/packages/typescript-client/test/cache.test.ts @@ -72,7 +72,7 @@ describe(`HTTP Proxy Cache`, { timeout: 30000 }, () => { }) => { // First request get initial request const initialRes = await fetch( - `${proxyCacheBaseUrl}/v1/shape/${issuesTableUrl}?offset=-1`, + `${proxyCacheBaseUrl}/v1/shape?table=${issuesTableUrl}&offset=-1`, {} ) @@ -82,13 +82,14 @@ describe(`HTTP Proxy Cache`, { timeout: 30000 }, () => { // add some data and follow with live request await insertIssues({ title: `foo` }) const searchParams = new URLSearchParams({ + table: issuesTableUrl, offset: initialRes.headers.get(`electric-chunk-last-offset`)!, shape_id: initialRes.headers.get(`electric-shape-id`)!, live: `true`, }) const liveRes = await fetch( - `${proxyCacheBaseUrl}/v1/shape/${issuesTableUrl}?${searchParams.toString()}`, + `${proxyCacheBaseUrl}/v1/shape?${searchParams.toString()}`, {} ) expect(liveRes.status).toBe(200) @@ -96,7 +97,7 @@ describe(`HTTP Proxy Cache`, { timeout: 30000 }, () => { // Second request gets a cached response const cachedRes = await fetch( - `${proxyCacheBaseUrl}/v1/shape/${issuesTableUrl}?${searchParams.toString()}`, + `${proxyCacheBaseUrl}/v1/shape?${searchParams.toString()}`, {} ) expect(cachedRes.status).toBe(200) @@ -110,7 +111,7 @@ describe(`HTTP Proxy Cache`, { timeout: 30000 }, () => { }) => { // First request gets non-cached response const originalRes = await fetch( - `${proxyCacheBaseUrl}/v1/shape/${issuesTableUrl}?offset=-1`, + `${proxyCacheBaseUrl}/v1/shape?table=${issuesTableUrl}&offset=-1`, {} ) @@ -119,7 +120,7 @@ describe(`HTTP Proxy Cache`, { timeout: 30000 }, () => { // Second request gets cached response const cachedRes = 
await fetch( - `${proxyCacheBaseUrl}/v1/shape/${issuesTableUrl}?offset=-1`, + `${proxyCacheBaseUrl}/v1/shape?table=${issuesTableUrl}&offset=-1`, {} ) expect(cachedRes.status).toBe(200) @@ -132,12 +133,12 @@ describe(`HTTP Proxy Cache`, { timeout: 30000 }, () => { issuesTableUrl, }) => { const originalRes = await fetch( - `${proxyCacheBaseUrl}/v1/shape/${issuesTableUrl}?offset=-1`, + `${proxyCacheBaseUrl}/v1/shape?table=${issuesTableUrl}&offset=-1`, {} ) const lastOffset = originalRes.headers.get(CHUNK_LAST_OFFSET_HEADER) const shapeId = originalRes.headers.get(SHAPE_ID_HEADER) - const urlToTest = `${proxyCacheBaseUrl}/v1/shape/${issuesTableUrl}?offset=${lastOffset}&shape_id=${shapeId}` + const urlToTest = `${proxyCacheBaseUrl}/v1/shape?table=${issuesTableUrl}&offset=${lastOffset}&shape_id=${shapeId}` // Make a first request such that response is cached const originalUpToDateRes = await fetch(urlToTest, {}) @@ -166,12 +167,12 @@ describe(`HTTP Proxy Cache`, { timeout: 30000 }, () => { issuesTableUrl, }) => { const originalRes = await fetch( - `${proxyCacheBaseUrl}/v1/shape/${issuesTableUrl}?offset=-1`, + `${proxyCacheBaseUrl}/v1/shape?table=${issuesTableUrl}&offset=-1`, {} ) const lastOffset = originalRes.headers.get(CHUNK_LAST_OFFSET_HEADER) const shapeId = originalRes.headers.get(SHAPE_ID_HEADER) - const urlToTest = `${proxyCacheBaseUrl}/v1/shape/${issuesTableUrl}?offset=${lastOffset}&shape_id=${shapeId}` + const urlToTest = `${proxyCacheBaseUrl}/v1/shape?table=${issuesTableUrl}&offset=${lastOffset}&shape_id=${shapeId}` // Make a first request such that response is cached const originalUpToDateRes = await fetch(urlToTest, {}) @@ -209,7 +210,7 @@ describe(`HTTP Initial Data Caching`, { timeout: 30000 }, () => { // Make a client that fetches a shape // which forces the shape data to be cached const client1Res = await fetch( - `${proxyCacheBaseUrl}/v1/shape/${issuesTableUrl}?offset=-1`, + `${proxyCacheBaseUrl}/v1/shape?table=${issuesTableUrl}&offset=-1`, {} ) 
expect(client1Res.status).toBe(200) @@ -222,7 +223,7 @@ describe(`HTTP Initial Data Caching`, { timeout: 30000 }, () => { // Make a 2nd client that fetches the shape // check that it is served from cached data const client2Res = await fetch( - `${proxyCacheBaseUrl}/v1/shape/${issuesTableUrl}?offset=-1`, + `${proxyCacheBaseUrl}/v1/shape?table=${issuesTableUrl}&offset=-1`, {} ) expect(client2Res.status).toBe(200) @@ -245,7 +246,7 @@ describe(`HTTP Initial Data Caching`, { timeout: 30000 }, () => { // should tell you to go back to initial sync // because the shape is out of scope const liveRes = await fetch( - `${proxyCacheBaseUrl}/v1/shape/${issuesTableUrl}?offset=${latestOffset}&shape_id=${originalShapeId}&live`, + `${proxyCacheBaseUrl}/v1/shape?table=${issuesTableUrl}&offset=${latestOffset}&shape_id=${originalShapeId}&live`, {} ) expect(liveRes.status).toBe(409) diff --git a/packages/typescript-client/test/client.test-d.ts b/packages/typescript-client/test/client.test-d.ts index 1a81cc69dd..5b563f602c 100644 --- a/packages/typescript-client/test/client.test-d.ts +++ b/packages/typescript-client/test/client.test-d.ts @@ -19,6 +19,7 @@ describe(`client`, () => { describe(`ShapeStream`, () => { it(`should infer generic row return type when no type is provided`, () => { const shapeStream = new ShapeStream({ + table: ``, url: ``, }) @@ -30,6 +31,7 @@ describe(`client`, () => { it(`should infer correct return type when provided`, () => { const shapeStream = new ShapeStream({ + table: ``, url: ``, parser: { timestampz: (date: string) => { @@ -50,6 +52,7 @@ describe(`client`, () => { describe(`Shape`, () => { it(`should infer generic row return type when no type is provided`, async () => { const shapeStream = new ShapeStream({ + table: ``, url: ``, }) const shape = new Shape(shapeStream) @@ -66,6 +69,7 @@ describe(`client`, () => { it(`should infer correct return type when provided`, async () => { const shapeStream = new ShapeStream({ + table: ``, url: ``, parser: { 
timestampz: (date: string) => { diff --git a/packages/typescript-client/test/client.test.ts b/packages/typescript-client/test/client.test.ts index 0170033de9..b84569a361 100644 --- a/packages/typescript-client/test/client.test.ts +++ b/packages/typescript-client/test/client.test.ts @@ -10,7 +10,8 @@ describe(`Shape`, () => { it(`should sync an empty shape`, async ({ issuesTableUrl }) => { const start = Date.now() const shapeStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, }) const shape = new Shape(shapeStream) const map = await shape.value @@ -31,7 +32,8 @@ describe(`Shape`, () => { const start = Date.now() const shapeStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter.signal, }) const shape = new Shape(shapeStream) @@ -65,7 +67,8 @@ describe(`Shape`, () => { const start = Date.now() const shapeStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter.signal, }) const shape = new Shape(shapeStream) @@ -158,7 +161,8 @@ describe(`Shape`, () => { } const shapeStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter.signal, fetchClient: fetchWrapper, }) @@ -195,7 +199,8 @@ describe(`Shape`, () => { const start = Date.now() const shapeStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter.signal, }) const shape = new Shape(shapeStream) @@ -228,7 +233,8 @@ describe(`Shape`, () => { it(`should support unsubscribe`, async ({ issuesTableUrl }) => { const shapeStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, }) const shape = new Shape(shapeStream) 
@@ -244,7 +250,8 @@ describe(`Shape`, () => { it(`should expose connection status`, async ({ issuesTableUrl }) => { const aborter = new AbortController() const shapeStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter.signal, }) @@ -268,7 +275,8 @@ describe(`Shape`, () => { }) => { let fetchShouldFail = false const shapeStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, fetchClient: async (_input, _init) => { if (fetchShouldFail) throw new FetchError( @@ -303,7 +311,8 @@ describe(`Shape`, () => { issuesTableUrl, }) => { const shapeStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, subscribe: false, }) @@ -316,7 +325,8 @@ describe(`Shape`, () => { it(`should expose isLoading status`, async ({ issuesTableUrl }) => { const shapeStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, fetchClient: async (input, init) => { await sleep(20) return fetch(input, init) diff --git a/packages/typescript-client/test/fetch.test.ts b/packages/typescript-client/test/fetch.test.ts index ddbd0f6c64..ffbdf14050 100644 --- a/packages/typescript-client/test/fetch.test.ts +++ b/packages/typescript-client/test/fetch.test.ts @@ -173,7 +173,7 @@ describe(`createFetchWithBackoff`, () => { }) describe(`createFetchWithChunkBuffer`, () => { - const baseUrl = `https://example.com/v1/shape/foo` + const baseUrl = `https://example.com/v1/shape?table=foo` let mockFetch: Mock const responseHeaders = (headers: Record) => { return new Headers(headers) @@ -218,7 +218,7 @@ describe(`createFetchWithChunkBuffer`, () => { expect(result).toBe(initialResponse) // Check if the next chunk was prefetched - const nextUrl = `${baseUrl}?shape_id=123&offset=456` + const nextUrl = 
`${baseUrl}&shape_id=123&offset=456` expect(mockFetch).toHaveBeenCalledWith(nextUrl, expect.anything()) }) @@ -250,23 +250,23 @@ describe(`createFetchWithChunkBuffer`, () => { expect(mockFetch).toHaveBeenCalledTimes(1 + maxPrefetchNum) expect(mockFetch).toHaveBeenNthCalledWith( 2, - `${baseUrl}?shape_id=123&offset=0`, + `${baseUrl}&shape_id=123&offset=0`, expect.anything() ) expect(mockFetch).toHaveBeenNthCalledWith( 3, - `${baseUrl}?shape_id=123&offset=1`, + `${baseUrl}&shape_id=123&offset=1`, expect.anything() ) // Second request consumes one of the prefetched responses and // next one fires up - await fetchWrapper(`${baseUrl}?shape_id=123&offset=0`) + await fetchWrapper(`${baseUrl}&shape_id=123&offset=0`) await sleep() expect(mockFetch).toHaveBeenCalledTimes(1 + maxPrefetchNum + 1) expect(mockFetch).toHaveBeenNthCalledWith( 4, - `${baseUrl}?shape_id=123&offset=2`, + `${baseUrl}&shape_id=123&offset=2`, expect.anything() ) }) @@ -297,7 +297,7 @@ describe(`createFetchWithChunkBuffer`, () => { expect(result).toBe(initialResponse) // fetch the next chunk as well - const nextUrl = `${baseUrl}?shape_id=123&offset=456` + const nextUrl = `${baseUrl}&shape_id=123&offset=456` const nextResult = await fetchWrapper(nextUrl) expect(nextResult).toBe(nextResponse) @@ -339,7 +339,7 @@ describe(`createFetchWithChunkBuffer`, () => { expect(result).toBe(initialResponse) // Prefetch should have been attempted but failed - const nextUrl = `${baseUrl}?shape_id=123&offset=456` + const nextUrl = `${baseUrl}&shape_id=123&offset=456` expect(mockFetch).toHaveBeenCalledWith(nextUrl, expect.anything()) // One for the main request, one for the prefetch @@ -370,7 +370,7 @@ describe(`createFetchWithChunkBuffer`, () => { expect(mockFetch).toHaveBeenCalledTimes(2) // requesting a different path should clear the prefetches - const altUrl = `${baseUrl}/bar` + const altUrl = `${baseUrl}_alt` await fetchWrapper(altUrl) await sleep() @@ -381,7 +381,7 @@ describe(`createFetchWithChunkBuffer`, () => { 
expect(mockFetch).toHaveBeenNthCalledWith(1, baseUrl) expect(mockFetch).toHaveBeenNthCalledWith( 2, - `${baseUrl}?shape_id=123&offset=0`, + `${baseUrl}&shape_id=123&offset=0`, expect.anything() ) @@ -389,12 +389,12 @@ describe(`createFetchWithChunkBuffer`, () => { expect(mockFetch).toHaveBeenNthCalledWith(3, altUrl) expect(mockFetch).toHaveBeenNthCalledWith( 4, - `${altUrl}?shape_id=123&offset=2`, + `${altUrl}&shape_id=123&offset=2`, expect.anything() ) expect(mockFetch).toHaveBeenNthCalledWith( 5, - `${altUrl}?shape_id=123&offset=3`, + `${altUrl}&shape_id=123&offset=3`, expect.anything() ) }) diff --git a/packages/typescript-client/test/integration.test.ts b/packages/typescript-client/test/integration.test.ts index 441097d9b1..a82805c6ab 100644 --- a/packages/typescript-client/test/integration.test.ts +++ b/packages/typescript-client/test/integration.test.ts @@ -28,7 +28,8 @@ describe(`HTTP Sync`, () => { // Get initial data const shapeData = new Map() const issueStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, subscribe: false, signal: aborter.signal, }) @@ -65,7 +66,8 @@ describe(`HTTP Sync`, () => { // Get initial data const shapeData = new Map() const issueStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter.signal, fetchClient: fetchWrapper, }) @@ -116,7 +118,7 @@ describe(`HTTP Sync`, () => { issuesTableUrl, }) => { const res = await fetch( - `${BASE_URL}/v1/shape/${issuesTableUrl}?offset=-1`, + `${BASE_URL}/v1/shape?table=${issuesTableUrl}&offset=-1`, {} ) const shapeId = res.headers.get(`electric-shape-id`) @@ -127,7 +129,7 @@ describe(`HTTP Sync`, () => { issuesTableUrl, }) => { const res = await fetch( - `${BASE_URL}/v1/shape/${issuesTableUrl}?offset=-1`, + `${BASE_URL}/v1/shape?table=${issuesTableUrl}&offset=-1`, {} ) const lastOffset = 
res.headers.get(`electric-chunk-last-offset`) @@ -146,7 +148,8 @@ describe(`HTTP Sync`, () => { // Get initial data const shapeData = new Map() const issueStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter.signal, }) @@ -221,7 +224,8 @@ describe(`HTTP Sync`, () => { // Now fetch the data from the HTTP endpoint const issueStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${tableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: tableUrl, signal: aborter.signal, }) const client = new Shape(issueStream) @@ -296,7 +300,9 @@ describe(`HTTP Sync`, () => { ) await vi.waitFor(async () => { - const res = await fetch(`${BASE_URL}/v1/shape/${tableUrl}?offset=-1`) + const res = await fetch( + `${BASE_URL}/v1/shape?table=${tableUrl}&offset=-1` + ) const body = (await res.json()) as Message[] expect(body.length).greaterThan(1) }) @@ -348,7 +354,8 @@ describe(`HTTP Sync`, () => { const shapeData = new Map() const issueStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter.signal, }) let secondRowId = `` @@ -395,7 +402,8 @@ describe(`HTTP Sync`, () => { const shapeData = new Map() const issueStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter.signal, fetchClient: fetchWrapper, }) @@ -437,14 +445,16 @@ describe(`HTTP Sync`, () => { const shapeData1 = new Map() const aborter1 = new AbortController() const issueStream1 = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter1.signal, }) const shapeData2 = new Map() const aborter2 = new AbortController() const issueStream2 = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter2.signal, 
}) @@ -484,7 +494,8 @@ describe(`HTTP Sync`, () => { let lastOffset: Offset = `-1` const issueStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, signal: aborter.signal, subscribe: false, }) @@ -505,7 +516,7 @@ describe(`HTTP Sync`, () => { // And wait until it's definitely seen await vi.waitFor(async () => { const res = await fetch( - `${BASE_URL}/v1/shape/${issuesTableUrl}?offset=-1` + `${BASE_URL}/v1/shape?table=${issuesTableUrl}&offset=-1` ) const body = (await res.json()) as Message[] expect(body).toHaveLength(12) @@ -514,7 +525,8 @@ describe(`HTTP Sync`, () => { let catchupOpsCount = 0 const newAborter = new AbortController() const newIssueStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, subscribe: false, signal: newAborter.signal, offset: lastOffset, @@ -537,7 +549,7 @@ describe(`HTTP Sync`, () => { insertIssues, }) => { const res = await fetch( - `${BASE_URL}/v1/shape/${issuesTableUrl}?offset=-1`, + `${BASE_URL}/v1/shape?table=${issuesTableUrl}&offset=-1`, {} ) const cacheHeaders = res.headers.get(`cache-control`) @@ -563,7 +575,7 @@ describe(`HTTP Sync`, () => { await sleep(40) const res2 = await fetch( - `${BASE_URL}/v1/shape/${issuesTableUrl}?offset=-1`, + `${BASE_URL}/v1/shape?table=${issuesTableUrl}&offset=-1`, {} ) const etag2Header = res2.headers.get(`etag`) @@ -573,7 +585,7 @@ describe(`HTTP Sync`, () => { it(`should revalidate etags`, async ({ issuesTableUrl, insertIssues }) => { // Start the shape - await fetch(`${BASE_URL}/v1/shape/${issuesTableUrl}?offset=-1`, {}) + await fetch(`${BASE_URL}/v1/shape?table=${issuesTableUrl}&offset=-1`, {}) // Fill it up in separate transactions for (const i of [1, 2, 3, 4, 5, 6, 7, 8, 9]) { await insertIssues({ title: `foo${i}` }) @@ -582,7 +594,7 @@ describe(`HTTP Sync`, () => { await sleep(100) const res = await fetch( - 
`${BASE_URL}/v1/shape/${issuesTableUrl}?offset=-1`, + `${BASE_URL}/v1/shape?table=${issuesTableUrl}&offset=-1`, {} ) const messages = (await res.json()) as Message[] @@ -595,7 +607,7 @@ describe(`HTTP Sync`, () => { assert(etag !== null, `Response should have etag header`) const etagValidation = await fetch( - `${BASE_URL}/v1/shape/${issuesTableUrl}?offset=-1`, + `${BASE_URL}/v1/shape?table=${issuesTableUrl}&offset=-1`, { headers: { 'If-None-Match': etag }, } @@ -606,7 +618,7 @@ describe(`HTTP Sync`, () => { // Get etag for catchup const catchupEtagRes = await fetch( - `${BASE_URL}/v1/shape/${issuesTableUrl}?offset=${midOffset}&shape_id=${shapeId}`, + `${BASE_URL}/v1/shape?table=${issuesTableUrl}&offset=${midOffset}&shape_id=${shapeId}`, {} ) const catchupEtag = catchupEtagRes.headers.get(`etag`) @@ -615,7 +627,7 @@ describe(`HTTP Sync`, () => { // Catch-up offsets should also use the same etag as they're // also working through the end of the current log. const catchupEtagValidation = await fetch( - `${BASE_URL}/v1/shape/${issuesTableUrl}?offset=${midOffset}&shape_id=${shapeId}`, + `${BASE_URL}/v1/shape?table=${issuesTableUrl}&offset=${midOffset}&shape_id=${shapeId}`, { headers: { 'If-None-Match': catchupEtag }, } @@ -641,7 +653,8 @@ describe(`HTTP Sync`, () => { // Get initial data const shapeData = new Map() const issueStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, where: `title LIKE 'foo%'`, subscribe: true, signal: aborter.signal, @@ -677,7 +690,8 @@ describe(`HTTP Sync`, () => { // Get initial data const shapeData = new Map() const issueStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${tableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: tableUrl, columns: [`txt`, `i2`, `i4`], signal: aborter.signal, }) @@ -721,7 +735,8 @@ describe(`HTTP Sync`, () => { // Get initial data let lastOffset: Offset = `-1` const issueStream = new ShapeStream({ - url: 
`${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, subscribe: true, signal: aborter.signal, }) @@ -753,7 +768,7 @@ describe(`HTTP Sync`, () => { // And wait until it's definitely seen await vi.waitFor(async () => { const res = await fetch( - `${BASE_URL}/v1/shape/${issuesTableUrl}?offset=-1` + `${BASE_URL}/v1/shape?table=${issuesTableUrl}&offset=-1` ) const body = (await res.json()) as Message[] expect(body.length).greaterThan(2) @@ -772,7 +787,8 @@ describe(`HTTP Sync`, () => { const newAborter = new AbortController() const newIssueStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, subscribe: false, signal: newAborter.signal, offset: lastOffset, @@ -836,7 +852,8 @@ describe(`HTTP Sync`, () => { } const issueStream = new ShapeStream({ - url: `${BASE_URL}/v1/shape/${issuesTableUrl}`, + url: `${BASE_URL}/v1/shape`, + table: issuesTableUrl, subscribe: true, signal: aborter.signal, fetchClient: fetchWrapper, diff --git a/packages/typescript-client/test/stream.test.ts b/packages/typescript-client/test/stream.test.ts index 9c1ab77d43..e816e41b83 100644 --- a/packages/typescript-client/test/stream.test.ts +++ b/packages/typescript-client/test/stream.test.ts @@ -2,7 +2,7 @@ import { afterEach, beforeEach, describe, expect, it } from 'vitest' import { ShapeStream } from '../src' describe(`ShapeStream`, () => { - const shapeUrl = `https://example.com/v1/shape/foo` + const shapeUrl = `https://example.com/v1/shape` let aborter: AbortController beforeEach(() => { @@ -25,6 +25,7 @@ describe(`ShapeStream`, () => { const aborter = new AbortController() new ShapeStream({ url: shapeUrl, + table: `foo`, signal: aborter.signal, fetchClient: fetchWrapper, headers: { diff --git a/packages/typescript-client/test/support/test-context.ts b/packages/typescript-client/test/support/test-context.ts index 3651211354..e21d56878a 100644 --- 
a/packages/typescript-client/test/support/test-context.ts +++ b/packages/typescript-client/test/support/test-context.ts @@ -38,7 +38,7 @@ export const testWithDbClient = test.extend<{ await use(async (table: string, shapeId?: string) => { const baseUrl = inject(`baseUrl`) const resp = await fetch( - `${baseUrl}/v1/shape/${table}${shapeId ? `?shape_id=${shapeId}` : ``}`, + `${baseUrl}/v1/shape?table=${table}${shapeId ? `&shape_id=${shapeId}` : ``}`, { method: `DELETE`, } @@ -47,7 +47,7 @@ export const testWithDbClient = test.extend<{ console.error( await FetchError.fromResponse( resp, - `DELETE ${baseUrl}/v1/shape/${table}` + `DELETE ${baseUrl}/v1/shape?table=${table}` ) ) throw new Error(`Could not delete shape ${table} with ID ${shapeId}`) diff --git a/website/docs/api/clients/typescript.md b/website/docs/api/clients/typescript.md index 213825a264..23c9cbcc95 100644 --- a/website/docs/api/clients/typescript.md +++ b/website/docs/api/clients/typescript.md @@ -27,7 +27,8 @@ import { ShapeStream } from '@electric-sql/client' // Passes subscribers rows as they're inserted, updated, or deleted const stream = new ShapeStream({ - url: `http://localhost:3000/v1/shape/foo`, + url: `http://localhost:3000/v1/shape`, + table: `foo`, }) stream.subscribe(messages => { @@ -51,7 +52,8 @@ For example, we can extend the [default parser](https://github.com/electric-sql/ ```ts const stream = new ShapeStream({ - url: `http://localhost:3000/v1/shape/foo`, + url: `http://localhost:3000/v1/shape`, + table: `foo`, parser: { bool: (value: string) => value === `true` ? 
1 : 0 } @@ -64,7 +66,8 @@ const stream = new ShapeStream({ import { ShapeStream, Shape } from '@electric-sql/client' const stream = new ShapeStream({ - url: `http://localhost:3000/v1/shape/foo`, + url: `http://localhost:3000/v1/shape`, + table: `foo`, }) const shape = new Shape(stream) diff --git a/website/docs/api/http.md b/website/docs/api/http.md index 1baf13ce99..70647a85a9 100644 --- a/website/docs/api/http.md +++ b/website/docs/api/http.md @@ -23,14 +23,14 @@ The rest of this page will describe the features of the API. ## Syncing shapes The API allows you to sync [Shapes](/docs/guides/shapes) of data out of Postgres using the - GET /v1/shape endpoint. The pattern is as follows. First you make an initial sync request to get the current data for the Shape, such as: ```sh -curl -i 'http://localhost:3000/v1/shape/foo?offset=-1' +curl -i 'http://localhost:3000/v1/shape?table=foo&offset=-1' ``` Then you switch into a live mode to use long-polling to receive real-time updates. We'll go over these steps in more detail below. First a note on the data that the endpoint returns. @@ -87,7 +87,7 @@ Note that the other control message is `must-refetch` which indicates that the c Once a client is up-to-date, it can switch to live mode to receive real-time updates, by making requests with `live=true`, an `offset` and a `shape_id`, e.g.: ```sh -curl -i 'http://localhost:3000/v1/shape/foo?live=true&offset=0_0&shape_id=3833821-1721812114261' +curl -i 'http://localhost:3000/v1/shape?table=foo&live=true&offset=0_0&shape_id=3833821-1721812114261' ``` The `live` parameter puts the server into live mode, where it will hold open the connection, waiting for new data arrive. This allows you to implement a long-polling strategy to consume real-time updates. 
diff --git a/website/docs/api/integrations/react.md b/website/docs/api/integrations/react.md index 77f44b6f20..ced62680da 100644 --- a/website/docs/api/integrations/react.md +++ b/website/docs/api/integrations/react.md @@ -12,7 +12,8 @@ import { useShape } from "@electric-sql/react" export default function MyComponent() { const { isLoading, lastSyncedAt, data } = useShape<{ title: string}>({ - url: `http://localhost:3000/v1/shape/foo`, + url: `http://localhost:3000/v1/shape`, + table: `foo`, }) if (isLoading) { diff --git a/website/docs/guides/auth.md b/website/docs/guides/auth.md index 2bf73a7328..ca537033b9 100644 --- a/website/docs/guides/auth.md +++ b/website/docs/guides/auth.md @@ -82,15 +82,13 @@ Then for the `/api/shapes/users` route: ```tsx export async function GET( request: Request, - { params }: { params: { table: string } } ) { const url = new URL(request.url) - const { table } = params // Construct the upstream URL - const originUrl = new URL(`http://localhost:3000/v1/shape/${table}`) + const originUrl = new URL(`http://localhost:3000/v1/shape`) - // Copy over the shape_id & offset query params that the + // Copy over the table, shape_id, and offset query params that the // Electric client adds so we return the right part of the Shape log. url.searchParams.forEach((value, key) => { - if ([`shape_id`, `offset`].includes(key)) { + if ([`table`, `shape_id`, `offset`].includes(key)) { diff --git a/website/docs/guides/shapes.md b/website/docs/guides/shapes.md index 9cf1d17cfc..5569ef0d9c 100644 --- a/website/docs/guides/shapes.md +++ b/website/docs/guides/shapes.md @@ -38,13 +38,13 @@ A client can choose to sync one shape, or lots of shapes. Many clients can sync Shapes are defined by: -- a `root_table`, such as `projects` +- a `table`, such as `projects` - a `where` clause, used to filter the rows in that table, such as `status='active'` > [!IMPORTANT] Limitations > Shapes are currently single table, whole row only. You can sync all the rows in a table, or a subset of the rows in that table.
You can't yet [select columns](#whole-rows) or sync an [include tree](#single-table) without filtering or joining in the client. -### `root_table` +### `table` This is the root table of the shape. It must match a table in your Postgres database. @@ -52,7 +52,7 @@ The value can be just a tablename like `projects`, or can be a qualified tablena ### `where` clause -Optional where clause to filter rows in the `root_table`. +Optional where clause to filter rows in the `table`. This must be a valid [PostgreSQL WHERE clause](https://www.postgresql.org/docs/current/queries-table-expressions.html#QUERIES-WHERE) using SQL syntax, e.g.: @@ -91,18 +91,18 @@ client. In the client, shapes can be held as objects in memory, for example usin ### HTTP You can sync shapes manually using the - GET /v1/shape endpoint. First make an initial sync request to get the current data for the Shape, such as: ```sh -curl -i 'http://localhost:3000/v1/shape/foo?offset=-1' +curl -i 'http://localhost:3000/v1/shape?table=foo&offset=-1' ``` Then switch into a live mode to use long-polling to receive real-time updates: ```sh -curl -i 'http://localhost:3000/v1/shape/foo?live=true&offset=...&shape_id=...' +curl -i 'http://localhost:3000/v1/shape?table=foo&live=true&offset=...&shape_id=...' ``` These requests both return an array of [Shape Log](/docs/api/http#shape-log) entries. You can process these manually, or use a higher-level client. @@ -123,7 +123,8 @@ Instantiate a `ShapeStream` and materialise into a `Shape`: import { ShapeStream, Shape } from '@electric-sql/client' const stream = new ShapeStream({ - url: `http://localhost:3000/v1/shape/foo`, + url: `http://localhost:3000/v1/shape`, + table: `foo`, }) const shape = new Shape(stream) diff --git a/website/docs/quickstart.md b/website/docs/quickstart.md index a4b713a1dc..b114baf2a5 100644 --- a/website/docs/quickstart.md +++ b/website/docs/quickstart.md @@ -36,13 +36,13 @@ First let's try the low-level [HTTP API](/docs/api/http). 
In a new terminal, use `curl` to request a [Shape](/docs/guides/shapes) containing all rows in the `foo` table: ```sh -curl -i 'http://localhost:3000/v1/shape/foo?offset=-1' +curl -i 'http://localhost:3000/v1/shape?table=foo&offset=-1' ``` ::: info A bit of explanation about the URL structure. -- `/v1/shape/` is a standard prefix with the API version and the shape sync endpoint path -- `foo` is the name of the [`root_table`](/docs/guides/shapes#root-table) of the shape (and is required); if you wanted to sync data from the `items` table, you would change the path to `/v1/shape/items` +- `/v1/shape` is a standard prefix with the API version and the shape sync endpoint path +- `foo` is the name of the [`table`](/docs/guides/shapes#table) of the shape (and is required); if you wanted to sync data from the `items` table, you would change the `table` query parameter, as in `/v1/shape?table=items` - `offset=-1` means we're asking for the *entire* Shape as we don't have any of the data cached locally yet. If we had previously fetched the shape and wanted to see if there were any updates, we'd set the offset to the last offset we'd already seen. ::: @@ -60,7 +60,7 @@ access-control-expose-headers: * access-control-allow-methods: GET, POST, OPTIONS content-type: application/json; charset=utf-8 -{"root_table":["table not found"]} +{"table":["table not found"]} ``` So it didn't work! Which makes sense... as it's an empty database without any tables or data. Let's fix that. @@ -99,7 +99,7 @@ INSERT INTO foo (name, value) VALUES Exit your Postgres client (e.g.: with `psql` enter `\q`) and try the `curl` request again: ```sh -curl -i 'http://localhost:3000/v1/shape/foo?offset=-1' +curl -i 'http://localhost:3000/v1/shape?table=foo&offset=-1' ``` Success!
You should see the data you just put into Postgres in the shape response: @@ -158,7 +158,8 @@ import { useShape } from '@electric-sql/react' function Component() { const { data } = useShape({ - url: `http://localhost:3000/v1/shape/foo`, + url: `http://localhost:3000/v1/shape`, + table: `foo` }) return ( diff --git a/website/electric-api.yaml b/website/electric-api.yaml index 755bed46e0..850100854f 100644 --- a/website/electric-api.yaml +++ b/website/electric-api.yaml @@ -17,19 +17,19 @@ servers: description: Local server paths: - /v1/shape/{root_table}: + /v1/shape: get: summary: Get Shape description: |- Load the initial data for a shape and poll for real-time updates. - Define your shape using the `root_table` and `where` parameters. + Define your shape using the `table` and `where` parameters. Use `offset` to fetch data from a specific position in the shape log and the `live` parameter to consume real-time updates. parameters: - # Path parameters - - name: root_table - in: path + # Query parameters + - name: table + in: query schema: type: string examples: @@ -46,7 +46,6 @@ paths: Can be just a tablename, or can be prefixed by the database schema using a `.` delimiter, such as `foo.issues`. If you don't provide a schema prefix, then the table is assumed to be in the `public.` schema. - # Query parameters - name: offset in: query schema: @@ -110,7 +109,7 @@ paths: schema: type: string description: |- - Optional where clause to filter rows in the `root_table`. + Optional where clause to filter rows in the `table`. This should be a valid PostgreSQL WHERE clause using SQL syntax. examples: @@ -126,7 +125,7 @@ paths: schema: type: string description: |- - Optional list of columns to include in the rows from the `root_table`. + Optional list of columns to include in the rows from the `table`. They should always include the primary key columns, and should be formed as a comma separated list of column names exactly as they are in the database schema. 
@@ -345,9 +344,9 @@ paths: **NOTE** Delete shape only works if Electric is configured to `allow_shape_deletion`. parameters: - # Path parameters - - name: root_table - in: path + # Query parameters + - name: table + in: query schema: type: string examples: @@ -362,7 +361,6 @@ paths: The name of the table for which to delete the shape. Can be qualified by the schema name. - # Query parameters - name: shape_id in: query schema: diff --git a/website/src/partials/home-cta.md b/website/src/partials/home-cta.md index 645647a4c9..4d56a643d9 100644 --- a/website/src/partials/home-cta.md +++ b/website/src/partials/home-cta.md @@ -24,7 +24,8 @@ import { useShape } from '@electric-sql/react' const Component = () => { const { data } = useShape({ - url: `${BASE_URL}/v1/shape/items` + url: `${BASE_URL}/v1/shape`, + table: `items` }) return ( @@ -87,4 +88,4 @@ to syncing into a local embedded theme="alt" /> - \ No newline at end of file + diff --git a/website/src/partials/sync-into-pglite.tsx b/website/src/partials/sync-into-pglite.tsx index 6e94428978..f81f9b309e 100644 --- a/website/src/partials/sync-into-pglite.tsx +++ b/website/src/partials/sync-into-pglite.tsx @@ -21,7 +21,8 @@ await pg.exec(` // Establish a persistent shape subscription await pg.electric.syncShapeToTable({ - url: `${BASE_URL}/v1/shape/items`, + // TODO update this when the sync plugin is updated. + url: `${BASE_URL}/v1/shape`, table: 'items', primaryKey: ['id'], }) @@ -34,6 +35,6 @@ const Component = () => { ) return ( -
{ JSON.stringify(items) }
-  )
-}
\ No newline at end of file
+    
{JSON.stringify(items)}
+      )
+}
diff --git a/website/use-cases/state-transfer.md b/website/use-cases/state-transfer.md
index afa83c9a30..59c84ac04b 100644
--- a/website/use-cases/state-transfer.md
+++ b/website/use-cases/state-transfer.md
@@ -88,7 +88,8 @@ import { useShape } from '@electric-sql/react'
 
 const Component = () => {
   const { data } = useShape({
-    url: `${BASE_URL}/v1/shape/items`
+    url: `${BASE_URL}/v1/shape`,
+    table: `items`
   })
 
   return (