diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml index 462d9be..18268c8 100644 --- a/.github/workflows/build-and-test.yml +++ b/.github/workflows/build-and-test.yml @@ -23,8 +23,12 @@ jobs: - name: Build run: dotnet build --configuration Release --no-restore - - name: Initialize Testing Stack - run: docker compose up -d + - uses: supabase/setup-cli@v1 + with: + version: latest + + - name: Start Supabase + run: supabase start - name: Test run: dotnet test --no-restore diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 33cbbe9..5ecff06 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -3,10 +3,29 @@ name: Publish NuGet Package on: push: branches: - - release/* # Default release branch + - master + workflow_run: + workflows: [ build-and-test ] + types: + - completed jobs: + release-please: + runs-on: ubuntu-latest + permissions: + contents: write + pull-requests: write + issues: write + steps: + - uses: googleapis/release-please-action@v4 + with: + target-branch: ${{ github.ref_name }} + manifest-file: .release-please-manifest.json + config-file: release-please-config.json + publish: + needs: release-please + if: ${{ github.repository_owner == 'supabase-community' && startsWith(github.event.head_commit.message, 'chore(master)') && github.ref == 'refs/heads/master' && github.event_name == 'push' }} name: build, pack & publish runs-on: ubuntu-latest steps: diff --git a/.release-please-manifest.json b/.release-please-manifest.json new file mode 100644 index 0000000..3b4c2d4 --- /dev/null +++ b/.release-please-manifest.json @@ -0,0 +1,3 @@ +{ + ".": "4.1.0" +} \ No newline at end of file diff --git a/Postgrest/Postgrest.csproj b/Postgrest/Postgrest.csproj index 30f04ab..afb590b 100644 --- a/Postgrest/Postgrest.csproj +++ b/Postgrest/Postgrest.csproj @@ -22,8 +22,10 @@ https://avatars.githubusercontent.com/u/54469796?s=200&v=4 supabase,postgrest + 4.1.0 4.1.0 + true icon.png README.md @@ -35,7 +37,7 @@ - 4.1.0 + 4.1.0 $(VersionPrefix)-$(VersionSuffix) $(VersionPrefix) diff --git a/PostgrestTests/ClientTests.cs b/PostgrestTests/ClientTests.cs index bf6e08f..4a0b8d6 100644 --- a/PostgrestTests/ClientTests.cs +++ b/PostgrestTests/ClientTests.cs @@ -19,7 +19,7 @@ namespace PostgrestTests [TestClass] public class ClientTests { - private const string BaseUrl = "http://localhost:3000"; + private const string BaseUrl = "http://localhost:54321/rest/v1"; [TestMethod("Initializes")] public void TestInitialization() diff --git a/PostgrestTests/CoercionTests.cs b/PostgrestTests/CoercionTests.cs index bf92952..e94a53f 100644 --- a/PostgrestTests/CoercionTests.cs +++ b/PostgrestTests/CoercionTests.cs @@ -11,7 +11,7 @@ namespace PostgrestTests; [TestClass] public class CoercionTests { - private const string BaseUrl = "http://localhost:3000"; + private const string BaseUrl = "http://localhost:54321/rest/v1"; [TestMethod("Coercion: Can coerce primitive types")] public async Task CanCoercePrimitiveTypes() diff --git a/PostgrestTests/Helpers.cs b/PostgrestTests/Helpers.cs index f47ec40..4cc3dff 100644 --- a/PostgrestTests/Helpers.cs +++ b/PostgrestTests/Helpers.cs @@ -25,9 +25,9 @@ internal static Client GetHostedClient() internal static Client GetLocalClient() { var url = Environment.GetEnvironmentVariable("SUPABASE_URL"); - if (url == null) url = "http://localhost:3000"; + if (url == null) url = "http://localhost:54321"; var publicKey = Environment.GetEnvironmentVariable("SUPABASE_PUBLIC_KEY"); - if (publicKey == 
null) publicKey = "reallyreallyreallyreallyverysafe"; + if (publicKey == null) publicKey = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6ImFub24iLCJleHAiOjE5ODM4MTI5OTZ9.CRXP1A7WOeoJeXxjNni43kdQwgnWNReilDMblYTn_I0"; var client = new Client($"{url}/rest/v1", new ClientOptions { diff --git a/PostgrestTests/LinqTests.cs b/PostgrestTests/LinqTests.cs index d65eba8..8b68b94 100644 --- a/PostgrestTests/LinqTests.cs +++ b/PostgrestTests/LinqTests.cs @@ -13,7 +13,7 @@ namespace PostgrestTests [TestClass] public class LinqTests { - private const string BaseUrl = "http://localhost:3000"; + private const string BaseUrl = "http://localhost:54321/rest/v1"; [TestMethod("Linq: Select")] public async Task TestLinqSelect() @@ -123,14 +123,17 @@ await client.Table() .Get(); await client.Table() + .Where(x => x.DateTimeValue == null) .Set(x => x.BooleanValue!, true) .Update(); await client.Table() + .Where(x => x.DateTimeValue == null) .Set(x => x.BooleanValue, true) .Update(); await client.Table() + .Where(x => x.DateTimeValue == null) .Set(x => x.StringValue!, null) .Update(); } diff --git a/PostgrestTests/Models/Category.cs b/PostgrestTests/Models/Category.cs new file mode 100644 index 0000000..b940bc9 --- /dev/null +++ b/PostgrestTests/Models/Category.cs @@ -0,0 +1,15 @@ +using System; +using Supabase.Postgrest.Attributes; +using Supabase.Postgrest.Models; + +namespace PostgrestTests.Models; + +[Table("category")] +public class Category : BaseModel +{ + [PrimaryKey("id")] + public Guid Id { get; set; } + + [Column("name")] + public string? Name { get; set; } +} \ No newline at end of file diff --git a/PostgrestTests/Models/Product.cs b/PostgrestTests/Models/Product.cs new file mode 100644 index 0000000..e652156 --- /dev/null +++ b/PostgrestTests/Models/Product.cs @@ -0,0 +1,19 @@ +using System; +using Supabase.Postgrest.Attributes; +using Supabase.Postgrest.Models; + +namespace PostgrestTests.Models; + +[Table("product")] +public class Product : BaseModel +{ + + [PrimaryKey("id")] + public Guid Id { get; set; } + + [Column("name")] + public string? Name { get; set; } + + [Reference(typeof(Category))] + public Category? 
Category { get; set; } +} \ No newline at end of file diff --git a/PostgrestTests/ReferenceTests.cs b/PostgrestTests/ReferenceTests.cs index edeed32..2dcf604 100644 --- a/PostgrestTests/ReferenceTests.cs +++ b/PostgrestTests/ReferenceTests.cs @@ -12,7 +12,7 @@ namespace PostgrestTests [TestClass] public class ReferenceTests { - private const string BaseUrl = "http://localhost:3000"; + private const string BaseUrl = "http://localhost:54321/rest/v1"; [TestMethod("Reference: Returns linked models on a root model.")] public async Task TestReferenceReturnsLinkedModels() @@ -36,6 +36,7 @@ public async Task TestReferenceReturnsLinkedModels() .Single(); Assert.IsNotNull(person2?.Profile); + Assert.IsTrue(person2.Profile.Email!.Contains("bob")); var byEmail = await client.Table() .Order(x => x.CreatedAt, Ordering.Ascending) @@ -43,6 +44,26 @@ public async Task TestReferenceReturnsLinkedModels() .Single(); Assert.IsNotNull(byEmail); + + var product = await client.Table() + .Filter("id", Operator.Equals, "8b8e89a0-63c7-4917-8dc1-7797dc0285f1") + .Single(); + + Assert.IsNotNull(product); + Assert.AreEqual("product 1", product.Name); + + Assert.IsNotNull(product.Category); + Assert.AreEqual("999e4b26-91a8-4ea4-af2c-77a3540f7843", product.Category.Id.ToString()); + Assert.AreEqual("category 1", product.Category.Name); + + var products = await client.Table() + .Get(); + Assert.IsNotNull(products.Models); + Assert.IsTrue(products.Models.Count == 3); + + var productFiltered = products.Models.Find(x => x.Id.ToString() == "8b8e89a0-63c7-4917-8dc1-7797dc0285f1"); + Assert.AreEqual("999e4b26-91a8-4ea4-af2c-77a3540f7843", productFiltered?.Category?.Id.ToString()); + Assert.AreEqual("category 1", productFiltered?.Category?.Name); } [TestMethod("Reference: Can create linked records.")] diff --git a/PostgrestTests/TableWithCacheTests.cs b/PostgrestTests/TableWithCacheTests.cs index b62159f..b5381ae 100644 --- a/PostgrestTests/TableWithCacheTests.cs +++ b/PostgrestTests/TableWithCacheTests.cs @@ -62,7 +62,7 @@ public Task Empty() [TestClass] public class TableWithCacheTests { - private const string BaseUrl = "http://localhost:3000"; + private const string BaseUrl = "http://localhost:54321/rest/v1"; [TestMethod("Table: Can construct with Caching Provider and raise events.")] public async Task TestCacheWorksWithGetRequests() diff --git a/README.md b/README.md index 5f831ac..34fde88 100644 --- a/README.md +++ b/README.md @@ -1,13 +1,7 @@ -

+# Supabase.Postgrest
+[![Build and Test](https://github.com/supabase-community/postgrest-csharp/actions/workflows/build-and-test.yml/badge.svg)](https://github.com/supabase-community/postgrest-csharp/actions/workflows/build-and-test.yml) +[![NuGet](https://img.shields.io/nuget/vpre/Supabase.Postgrest)](https://www.nuget.org/packages/Supabase.Postgrest/) --- @@ -32,7 +26,7 @@ await client.Table() --- -Documentation can be found [here](https://supabase-community.github.io/postgrest-csharp/api/Postgrest.html). +Documentation can be found [here](https://supabase-community.github.io/postgrest-csharp/api/Supabase.Postgrest.html). Postgrest-csharp is written primarily as a helper library for [supabase/supabase-csharp](https://github.com/supabase/supabase-csharp), however, it should be easy enough to use diff --git a/release-please-config.json b/release-please-config.json new file mode 100644 index 0000000..a7bfbe4 --- /dev/null +++ b/release-please-config.json @@ -0,0 +1,16 @@ +{ + "packages": { + ".": { + "changelog-path": "CHANGELOG.md", + "bump-minor-pre-major": false, + "bump-patch-for-minor-pre-major": false, + "draft": false, + "prerelease": false, + "release-type": "simple", + "extra-files": [ + "Postgrest/Postgrest.csproj" + ] + } + }, + "$schema": "https://raw.githubusercontent.com/googleapis/release-please/main/schemas/config.json" +} \ No newline at end of file diff --git a/supabase/.gitignore b/supabase/.gitignore new file mode 100644 index 0000000..ad9264f --- /dev/null +++ b/supabase/.gitignore @@ -0,0 +1,8 @@ +# Supabase +.branches +.temp + +# dotenvx +.env.keys +.env.local +.env.*.local diff --git a/supabase/config.toml b/supabase/config.toml new file mode 100644 index 0000000..82070a8 --- /dev/null +++ b/supabase/config.toml @@ -0,0 +1,315 @@ +# For detailed configuration reference documentation, visit: +# https://supabase.com/docs/guides/local-development/cli/config +# A string used to distinguish different Supabase projects on the same host. Defaults to the +# working directory name when running `supabase init`. +project_id = "postgrest-csharp" + +[api] +enabled = true +# Port to use for the API URL. +port = 54321 +# Schemas to expose in your API. Tables, views and stored procedures in this schema will get API +# endpoints. `public` and `graphql_public` schemas are included by default. +schemas = ["public", "graphql_public", "personal"] +# Extra schemas to add to the search_path of every request. +extra_search_path = ["public", "extensions"] +# The maximum number of rows returned from a view, table, or stored procedure. Limits payload size +# for accidental or malicious requests. +max_rows = 1000 + +[api.tls] +# Enable HTTPS endpoints locally using a self-signed certificate. +enabled = false + +[db] +# Port to use for the local database URL. +port = 54322 +# Port used by db diff command to initialize the shadow database. +shadow_port = 54320 +# The database major version to use. This has to be the same as your remote database's. Run `SHOW +# server_version;` on the remote database to check. +major_version = 15 + +[db.pooler] +enabled = false +# Port to use for the local connection pooler. +port = 54329 +# Specifies when a server connection can be reused by other clients. +# Configure one of the supported pooler modes: `transaction`, `session`. +pool_mode = "transaction" +# How many server connections to allow per user/database pair. +default_pool_size = 20 +# Maximum number of client connections allowed.
+max_client_conn = 100 + +# [db.vault] +# secret_key = "env(SECRET_VALUE)" + +[db.migrations] +# Specifies an ordered list of schema files that describe your database. +# Supports glob patterns relative to supabase directory: "./schemas/*.sql" +schema_paths = [] + +[db.seed] +# If enabled, seeds the database after migrations during a db reset. +enabled = true +# Specifies an ordered list of seed files to load during db reset. +# Supports glob patterns relative to supabase directory: "./seeds/*.sql" +sql_paths = ["./seed.sql"] + +[realtime] +enabled = true +# Bind realtime via either IPv4 or IPv6. (default: IPv4) +# ip_version = "IPv6" +# The maximum length in bytes of HTTP request headers. (default: 4096) +# max_header_length = 4096 + +[studio] +enabled = true +# Port to use for Supabase Studio. +port = 54323 +# External URL of the API server that frontend connects to. +api_url = "http://127.0.0.1" +# OpenAI API Key to use for Supabase AI in the Supabase Studio. +openai_api_key = "env(OPENAI_API_KEY)" + +# Email testing server. Emails sent with the local dev setup are not actually sent - rather, they +# are monitored, and you can view the emails that would have been sent from the web interface. +[inbucket] +enabled = true +# Port to use for the email testing server web interface. +port = 54324 +# Uncomment to expose additional ports for testing user applications that send emails. +# smtp_port = 54325 +# pop3_port = 54326 +# admin_email = "admin@email.com" +# sender_name = "Admin" + +[storage] +enabled = true +# The maximum file size allowed (e.g. "5MB", "500KB"). +file_size_limit = "50MiB" + +# Image transformation API is available to Supabase Pro plan. +# [storage.image_transformation] +# enabled = true + +# Uncomment to configure local storage buckets +# [storage.buckets.images] +# public = false +# file_size_limit = "50MiB" +# allowed_mime_types = ["image/png", "image/jpeg"] +# objects_path = "./images" + +[auth] +enabled = true +# The base URL of your website. Used as an allow-list for redirects and for constructing URLs used +# in emails. +site_url = "http://127.0.0.1:3000" +# A list of *exact* URLs that auth providers are permitted to redirect to post authentication. +additional_redirect_urls = ["https://127.0.0.1:3000"] +# How long tokens are valid for, in seconds. Defaults to 3600 (1 hour), maximum 604,800 (1 week). +jwt_expiry = 3600 +# If disabled, the refresh token will never expire. +enable_refresh_token_rotation = true +# Allows refresh tokens to be reused after expiry, up to the specified interval in seconds. +# Requires enable_refresh_token_rotation = true. +refresh_token_reuse_interval = 10 +# Allow/disallow new user signups to your project. +enable_signup = true +# Allow/disallow anonymous sign-ins to your project. +enable_anonymous_sign_ins = false +# Allow/disallow testing manual linking of accounts +enable_manual_linking = false +# Passwords shorter than this value will be rejected as weak. Minimum 6, recommended 8 or more. +minimum_password_length = 6 +# Passwords that do not meet the following requirements will be rejected as weak. Supported values +# are: `letters_digits`, `lower_upper_letters_digits`, `lower_upper_letters_digits_symbols` +password_requirements = "" + +[auth.rate_limit] +# Number of emails that can be sent per hour. Requires auth.email.smtp to be enabled. +email_sent = 2 +# Number of SMS messages that can be sent per hour. Requires auth.sms to be enabled. +sms_sent = 30 +# Number of anonymous sign-ins that can be made per hour per IP address. 
Requires enable_anonymous_sign_ins = true. +anonymous_users = 30 +# Number of sessions that can be refreshed in a 5 minute interval per IP address. +token_refresh = 150 +# Number of sign up and sign-in requests that can be made in a 5 minute interval per IP address (excludes anonymous users). +sign_in_sign_ups = 30 +# Number of OTP / Magic link verifications that can be made in a 5 minute interval per IP address. +token_verifications = 30 +# Number of Web3 logins that can be made in a 5 minute interval per IP address. +web3 = 30 + +# Configure one of the supported captcha providers: `hcaptcha`, `turnstile`. +# [auth.captcha] +# enabled = true +# provider = "hcaptcha" +# secret = "" + +[auth.email] +# Allow/disallow new user signups via email to your project. +enable_signup = true +# If enabled, a user will be required to confirm any email change on both the old, and new email +# addresses. If disabled, only the new email is required to confirm. +double_confirm_changes = true +# If enabled, users need to confirm their email address before signing in. +enable_confirmations = false +# If enabled, users will need to reauthenticate or have logged in recently to change their password. +secure_password_change = false +# Controls the minimum amount of time that must pass before sending another signup confirmation or password reset email. +max_frequency = "1s" +# Number of characters used in the email OTP. +otp_length = 6 +# Number of seconds before the email OTP expires (defaults to 1 hour). +otp_expiry = 3600 + +# Use a production-ready SMTP server +# [auth.email.smtp] +# enabled = true +# host = "smtp.sendgrid.net" +# port = 587 +# user = "apikey" +# pass = "env(SENDGRID_API_KEY)" +# admin_email = "admin@email.com" +# sender_name = "Admin" + +# Uncomment to customize email template +# [auth.email.template.invite] +# subject = "You have been invited" +# content_path = "./supabase/templates/invite.html" + +[auth.sms] +# Allow/disallow new user signups via SMS to your project. +enable_signup = false +# If enabled, users need to confirm their phone number before signing in. +enable_confirmations = false +# Template for sending OTP to users +template = "Your code is {{ .Code }}" +# Controls the minimum amount of time that must pass before sending another sms otp. +max_frequency = "5s" + +# Use pre-defined map of phone number to OTP for testing. +# [auth.sms.test_otp] +# 4152127777 = "123456" + +# Configure logged in session timeouts. +# [auth.sessions] +# Force log out after the specified duration. +# timebox = "24h" +# Force log out if the user has been inactive longer than the specified duration. +# inactivity_timeout = "8h" + +# This hook runs before a token is issued and allows you to add additional claims based on the authentication method used. +# [auth.hook.custom_access_token] +# enabled = true +# uri = "pg-functions:////" + +# Configure one of the supported SMS providers: `twilio`, `twilio_verify`, `messagebird`, `textlocal`, `vonage`. +[auth.sms.twilio] +enabled = false +account_sid = "" +message_service_sid = "" +# DO NOT commit your Twilio auth token to git. Use environment variable substitution instead: +auth_token = "env(SUPABASE_AUTH_SMS_TWILIO_AUTH_TOKEN)" + +# Multi-factor-authentication is available to Supabase Pro plan. +[auth.mfa] +# Control how many MFA factors can be enrolled at once per user. 
+max_enrolled_factors = 10 + +# Control MFA via App Authenticator (TOTP) +[auth.mfa.totp] +enroll_enabled = false +verify_enabled = false + +# Configure MFA via Phone Messaging +[auth.mfa.phone] +enroll_enabled = false +verify_enabled = false +otp_length = 6 +template = "Your code is {{ .Code }}" +max_frequency = "5s" + +# Configure MFA via WebAuthn +# [auth.mfa.web_authn] +# enroll_enabled = true +# verify_enabled = true + +# Use an external OAuth provider. The full list of providers are: `apple`, `azure`, `bitbucket`, +# `discord`, `facebook`, `github`, `gitlab`, `google`, `keycloak`, `linkedin_oidc`, `notion`, `twitch`, +# `twitter`, `slack`, `spotify`, `workos`, `zoom`. +[auth.external.apple] +enabled = false +client_id = "" +# DO NOT commit your OAuth provider secret to git. Use environment variable substitution instead: +secret = "env(SUPABASE_AUTH_EXTERNAL_APPLE_SECRET)" +# Overrides the default auth redirectUrl. +redirect_uri = "" +# Overrides the default auth provider URL. Used to support self-hosted gitlab, single-tenant Azure, +# or any other third-party OIDC providers. +url = "" +# If enabled, the nonce check will be skipped. Required for local sign in with Google auth. +skip_nonce_check = false + +# Allow Solana wallet holders to sign in to your project via the Sign in with Solana (SIWS, EIP-4361) standard. +# You can configure "web3" rate limit in the [auth.rate_limit] section and set up [auth.captcha] if self-hosting. +[auth.web3.solana] +enabled = false + +# Use Firebase Auth as a third-party provider alongside Supabase Auth. +[auth.third_party.firebase] +enabled = false +# project_id = "my-firebase-project" + +# Use Auth0 as a third-party provider alongside Supabase Auth. +[auth.third_party.auth0] +enabled = false +# tenant = "my-auth0-tenant" +# tenant_region = "us" + +# Use AWS Cognito (Amplify) as a third-party provider alongside Supabase Auth. +[auth.third_party.aws_cognito] +enabled = false +# user_pool_id = "my-user-pool-id" +# user_pool_region = "us-east-1" + +# Use Clerk as a third-party provider alongside Supabase Auth. +[auth.third_party.clerk] +enabled = false +# Obtain from https://clerk.com/setup/supabase +# domain = "example.clerk.accounts.dev" + +[edge_runtime] +enabled = true +# Configure one of the supported request policies: `oneshot`, `per_worker`. +# Use `oneshot` for hot reload, or `per_worker` for load testing. +policy = "oneshot" +# Port to attach the Chrome inspector for debugging edge functions. +inspector_port = 8083 +# The Deno major version to use. +deno_version = 1 + +# [edge_runtime.secrets] +# secret_key = "env(SECRET_VALUE)" + +[analytics] +enabled = true +port = 54327 +# Configure one of the supported backends: `postgres`, `bigquery`. +backend = "postgres" + +# Experimental features may be deprecated any time +[experimental] +# Configures Postgres storage engine to use OrioleDB (S3) +orioledb_version = "" +# Configures S3 bucket URL, eg. .s3-.amazonaws.com +s3_host = "env(S3_HOST)" +# Configures S3 bucket region, eg. 
us-east-1 +s3_region = "env(S3_REGION)" +# Configures AWS_ACCESS_KEY_ID for S3 bucket +s3_access_key = "env(S3_ACCESS_KEY)" +# Configures AWS_SECRET_ACCESS_KEY for S3 bucket +s3_secret_key = "env(S3_SECRET_KEY)" diff --git a/supabase/migrations/1749515112_schema.sql b/supabase/migrations/1749515112_schema.sql new file mode 100644 index 0000000..72cb34f --- /dev/null +++ b/supabase/migrations/1749515112_schema.sql @@ -0,0 +1,197 @@ +-- Create a second schema +CREATE SCHEMA personal; + +-- USERS +CREATE TYPE public.user_status AS ENUM ('ONLINE', 'OFFLINE'); +CREATE TABLE public.users +( + username text primary key, + inserted_at timestamp without time zone DEFAULT timezone('utc'::text, now()) NOT NULL, + updated_at timestamp without time zone DEFAULT timezone('utc'::text, now()) NOT NULL, + favorite_numbers int[] DEFAULT null, + favorite_name text UNIQUE null, + data jsonb DEFAULT null, + age_range int4range DEFAULT null, + status user_status DEFAULT 'ONLINE'::public.user_status, + catchphrase tsvector DEFAULT null +); +ALTER TABLE public.users + REPLICA IDENTITY FULL; -- Send "previous data" to supabase +COMMENT ON COLUMN public.users.data IS 'For unstructured data and prototyping.'; + +-- CHANNELS +CREATE TABLE public.channels +( + id bigint GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, + inserted_at timestamp without time zone DEFAULT timezone('utc'::text, now()) NOT NULL, + updated_at timestamp without time zone DEFAULT timezone('utc'::text, now()) NOT NULL, + data jsonb DEFAULT null, + slug text +); +ALTER TABLE public.users + REPLICA IDENTITY FULL; -- Send "previous data" to supabase +COMMENT ON COLUMN public.channels.data IS 'For unstructured data and prototyping.'; + +-- MESSAGES +CREATE TABLE public.messages +( + id bigint GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, + inserted_at timestamp without time zone DEFAULT timezone('utc'::text, now()) NOT NULL, + updated_at timestamp without time zone DEFAULT timezone('utc'::text, now()) NOT NULL, + data jsonb DEFAULT null, + message text, + username text REFERENCES users NOT NULL, + channel_id bigint REFERENCES channels NOT NULL +); +ALTER TABLE public.messages + REPLICA IDENTITY FULL; -- Send "previous data" to supabase +COMMENT ON COLUMN public.messages.data IS 'For unstructured data and prototyping.'; + +create table "public"."kitchen_sink" +( + "id" uuid DEFAULT gen_random_uuid() PRIMARY KEY, + "string_value" varchar(255) null, + "bool_value" BOOL DEFAULT false, + "unique_value" varchar(255) UNIQUE, + "int_value" INT null, + "long_value" BIGINT null, + "float_value" FLOAT null, + "double_value" DOUBLE PRECISION null, + "datetime_value" timestamp null, + "datetime_value_1" timestamp null, + "datetime_pos_infinite_value" timestamp null, + "datetime_neg_infinite_value" timestamp null, + "list_of_strings" TEXT[] null, + "list_of_datetimes" DATE[] null, + "list_of_ints" INT[] null, + "list_of_floats" FLOAT[] null, + "int_range" INT4RANGE null, + "uuidv4" uuid null +); + +CREATE TABLE public.movie +( + id uuid DEFAULT gen_random_uuid() PRIMARY KEY, + created_at timestamp without time zone NOT NULL DEFAULT now(), + name character varying(255) NULL, + status character varying(255) NULL +); + +CREATE TABLE public.person +( + id uuid DEFAULT gen_random_uuid() PRIMARY KEY, + created_at timestamp without time zone NOT NULL DEFAULT now(), + first_name character varying(255) NULL, + last_name character varying(255) NULL +); + +CREATE TABLE public.profile +( + person_id uuid PRIMARY KEY references person (id), + email character varying(255) null, + 
created_at timestamp without time zone NOT NULL DEFAULT now() +); + +CREATE TABLE public.movie_person +( + movie_id uuid references movie (id), + person_id uuid references person (id), + primary key (movie_id, person_id) +); + +CREATE TABLE public.category +( + id uuid PRIMARY KEY, + name character varying(255) null, + created_at timestamp without time zone NOT NULL DEFAULT now() +); + +CREATE TABLE public.product +( + id uuid PRIMARY KEY , + name character varying(255) null, + category_id uuid references category (id), + created_at timestamp without time zone NOT NULL DEFAULT now() +); + +-- STORED FUNCTION +CREATE FUNCTION public.get_status(name_param text) + RETURNS user_status AS +$$ +SELECT status +from users +WHERE username = name_param; +$$ LANGUAGE SQL IMMUTABLE; + +-- STORED FUNCTION WITH ROW PARAMETER +CREATE FUNCTION public.get_data(param public.users) + RETURNS public.users.data%TYPE AS +$$ +SELECT data +from users u +WHERE u.username = param.username; +$$ LANGUAGE SQL IMMUTABLE; + +-- SECOND SCHEMA USERS +CREATE TYPE personal.user_status AS ENUM ('ONLINE', 'OFFLINE'); +CREATE TABLE personal.users +( + username text primary key, + inserted_at timestamp without time zone DEFAULT timezone('utc'::text, now()) NOT NULL, + updated_at timestamp without time zone DEFAULT timezone('utc'::text, now()) NOT NULL, + data jsonb DEFAULT null, + age_range int4range DEFAULT null, + status user_status DEFAULT 'ONLINE'::public.user_status +); + +-- SECOND SCHEMA STORED FUNCTION +CREATE FUNCTION personal.get_status(name_param text) + RETURNS user_status AS +$$ +SELECT status +from users +WHERE username = name_param; +$$ LANGUAGE SQL IMMUTABLE; + +-- SECOND SCHEMA STORED FUNCTION WITH ROW PARAMETER +CREATE FUNCTION personal.get_data(param personal.users) + RETURNS personal.users.data%TYPE AS +$$ +SELECT data +from users u +WHERE u.username = param.username; +$$ LANGUAGE SQL IMMUTABLE; + +create table public.foreign_key_test +( + "id" serial primary key, + "movie_fk_1" UUID null, + "movie_fk_2" UUID null, + "random_person_fk" UUID NULL +); + +ALTER TABLE "public"."foreign_key_test" + ADD CONSTRAINT "foreign_key_test_relation_1" FOREIGN KEY ("movie_fk_1") REFERENCES "public"."movie" ("id") ON UPDATE CASCADE ON DELETE CASCADE; +ALTER TABLE "public"."foreign_key_test" + ADD CONSTRAINT "foreign_key_test_relation_2" FOREIGN KEY ("movie_fk_2") REFERENCES "public"."movie" ("id") ON UPDATE CASCADE ON DELETE CASCADE; +ALTER TABLE "public"."foreign_key_test" + ADD CONSTRAINT "foreign_key_random_person_fk" FOREIGN KEY ("random_person_fk") REFERENCES "public"."person" ("id") ON UPDATE CASCADE ON DELETE CASCADE; + +create table "public"."nested_foreign_key_test" +( + "id" serial primary key, + "foreign_key_test_fk" INT null, + "user_fk" varchar(255) null +); + +ALTER TABLE "public"."nested_foreign_key_test" + ADD CONSTRAINT "nested_foreign_key_test_relation_1" FOREIGN KEY ("foreign_key_test_fk") REFERENCES "public"."foreign_key_test" ("id") ON UPDATE CASCADE ON DELETE CASCADE; +ALTER TABLE "public"."nested_foreign_key_test" + ADD CONSTRAINT "nested_foreign_key_test_relation_2" FOREIGN KEY ("user_fk") REFERENCES "public"."users" ("username") ON UPDATE CASCADE ON DELETE CASCADE; + +grant usage on schema personal to postgres, anon, authenticated, service_role; +alter default privileges in schema personal grant all on tables to postgres, anon, authenticated, service_role; +alter default privileges in schema personal grant all on functions to postgres, anon, authenticated, service_role; +alter default privileges in 
schema personal grant all on sequences to postgres, anon, authenticated, service_role; + +GRANT ALL PRIVILEGES ON TABLE personal.users TO postgres, anon, authenticated, service_role; \ No newline at end of file diff --git a/supabase/migrations/1749515175_dummy_data.sql b/supabase/migrations/1749515175_dummy_data.sql new file mode 100644 index 0000000..4a9d82b --- /dev/null +++ b/supabase/migrations/1749515175_dummy_data.sql @@ -0,0 +1,107 @@ +INSERT INTO public.users (username, status, age_range, catchphrase) +VALUES ('supabot', 'ONLINE', '[1,2)'::int4range, 'fat cat'::tsvector), + ('kiwicopple', 'OFFLINE', '[25,35)'::int4range, 'cat bat'::tsvector), + ('awailas', 'ONLINE', '[25,35)'::int4range, 'bat rat'::tsvector), + ('acupofjose', 'OFFLINE', '[25,35)'::int4range, 'bat rat'::tsvector), + ('dragarcia', 'ONLINE', '[20,30)'::int4range, 'rat fat'::tsvector); + +INSERT INTO public.channels (slug) +VALUES ('public'), + ('random'); + +INSERT INTO public.messages (message, channel_id, username) +VALUES ('Hello World 👋', 1, 'supabot'), + ('Perfection is attained, not when there is nothing more to add, but when there is nothing left to take away.', + 2, 'supabot'); + +INSERT INTO personal.users (username, status, age_range) +VALUES ('supabot', 'ONLINE', '[1,2)'::int4range), + ('kiwicopple', 'OFFLINE', '[25,35)'::int4range), + ('awailas', 'ONLINE', '[25,35)'::int4range), + ('dragarcia', 'ONLINE', '[20,30)'::int4range), + ('leroyjenkins', 'OFFLINE', '[20,40)'::int4range); + +INSERT INTO public.kitchen_sink (id, + string_value, + bool_value, + int_value, + long_value, + float_value, + double_value, + datetime_value, + datetime_value_1, + datetime_pos_infinite_value, + datetime_neg_infinite_value, + list_of_strings, + list_of_datetimes, + list_of_ints, + list_of_floats, + int_range) + +VALUES ('f3ff356d-5803-43a7-b125-ba10cf10fdcd', + 'Im the Kitchen Sink!', + false, + 99999, + 2147483648, + '99999.0'::float4, + '99999.0'::float8, + 'Tue May 24 06:30:00 2022'::timestamp, + 'Tue May 20 06:00:00 2022'::timestamp, + 'Infinity', + '-infinity', + '{"set", "of", "strings"}', + '{NOW()}', + '{10, 20, 30, 40}', + '{10.0, 12.0}', + '[20,50]'::int4range); + + +insert into "public"."movie" ("created_at", "id", "name", "status") +values ('2022-08-20 00:29:45.400188', 'ea07bd86-a507-4c68-9545-b848bfe74c90', 'Top Gun: Maverick', 'OnDisplay'); +insert into "public"."movie" ("created_at", "id", "name", "status") +values ('2022-08-22 00:29:45.400188', 'a972a8f6-2e23-4172-be8d-7b65470ca0f4', 'Mad Max: Fury Road', 'OnDisplay'); +insert into "public"."movie" ("created_at", "id", "name", "status") +values ('2022-08-28 00:29:45.400188', '42fd15b1-3bff-431d-9fa5-314289beb246', 'Guns Away', 'OffDisplay'); + + +insert into "public"."person" ("created_at", "first_name", "id", "last_name") +values ('2022-08-20 00:30:02.120528', 'Tom', 'd53072eb-5e64-4e9c-8a29-3ed07076fb2f', 'Cruise'); +insert into "public"."person" ("created_at", "first_name", "id", "last_name") +values ('2022-08-20 00:30:02.120528', 'Tom', 'b76776ac-75ba-424f-b5bc-6cb85c2d2bbf', 'Holland'); +insert into "public"."person" ("created_at", "first_name", "id", "last_name") +values ('2022-08-20 00:30:33.72443', 'Bob', '6f06c038-38e0-4a39-8aac-2c5e8597856e', 'Saggett'); +insert into "public"."person" ("created_at", "first_name", "id", "last_name") +values ('2022-08-20 00:30:33.72443', 'Random', 'd948ca02-c432-470e-9fe5-738269491762', 'Actor'); + + +insert into "public"."profile" ("created_at", "email", "person_id") +values ('2022-08-20 00:30:33.72443', 
'tom.cruise@supabase.io', 'd53072eb-5e64-4e9c-8a29-3ed07076fb2f'); +insert into "public"."profile" ("created_at", "email", "person_id") +values ('2022-08-20 00:30:33.72443', 'tom.holland@supabase.io', 'b76776ac-75ba-424f-b5bc-6cb85c2d2bbf'); +insert into "public"."profile" ("created_at", "email", "person_id") +values ('2022-08-20 00:30:33.72443', 'bob.saggett@supabase.io', '6f06c038-38e0-4a39-8aac-2c5e8597856e'); + +insert into "public"."movie_person" ("movie_id", "person_id") +values ('ea07bd86-a507-4c68-9545-b848bfe74c90', 'd53072eb-5e64-4e9c-8a29-3ed07076fb2f'); +insert into "public"."movie_person" ("movie_id", "person_id") +values ('a972a8f6-2e23-4172-be8d-7b65470ca0f4', 'b76776ac-75ba-424f-b5bc-6cb85c2d2bbf'); +insert into "public"."movie_person" ("movie_id", "person_id") +values ('ea07bd86-a507-4c68-9545-b848bfe74c90', '6f06c038-38e0-4a39-8aac-2c5e8597856e'); +insert into "public"."movie_person" ("movie_id", "person_id") +values ('42fd15b1-3bff-431d-9fa5-314289beb246', 'd948ca02-c432-470e-9fe5-738269491762'); + +insert into "public"."foreign_key_test" ("movie_fk_1", "movie_fk_2", "random_person_fk") +values ('ea07bd86-a507-4c68-9545-b848bfe74c90', 'ea07bd86-a507-4c68-9545-b848bfe74c90', + 'd53072eb-5e64-4e9c-8a29-3ed07076fb2f'); + +insert into "public"."nested_foreign_key_test" ("foreign_key_test_fk", "user_fk") +values ('1', 'awailas'); + +insert into "public"."category" ("id", "name") +values ('999e4b26-91a8-4ea4-af2c-77a3540f7843', 'category 1'), + ('f15a224d-014d-4c21-a733-e2d0862eafe1', 'category 2'); + +insert into "public"."product" ("id", "name", "category_id") +values ('8b8e89a0-63c7-4917-8dc1-7797dc0285f1', 'product 1', '999e4b26-91a8-4ea4-af2c-77a3540f7843'), + ('db418e89-d472-415f-9bf1-7ae713d83617', 'product 2', '999e4b26-91a8-4ea4-af2c-77a3540f7843'), + ('f98aadbe-5796-44f8-be64-3832424940d4', 'product 3', 'f15a224d-014d-4c21-a733-e2d0862eafe1');
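
For local verification of the new `product`/`category` reference mapping, here is a minimal sketch of an MSTest case against the stack started by `supabase start`, reusing `GetLocalClient()` from `PostgrestTests/Helpers.cs` and the rows seeded by `1749515175_dummy_data.sql`. The `Helpers` class name, the `using` directives, and this test class itself are assumptions for illustration and are not part of this diff.

```csharp
using System.Threading.Tasks;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using PostgrestTests.Models;
using static Supabase.Postgrest.Constants;

namespace PostgrestTests;

// Hypothetical smoke test (not part of this diff): fetches a seeded product and
// checks that its [Reference(typeof(Category))] navigation is hydrated, mirroring
// the assertions added to ReferenceTests.cs above.
[TestClass]
public class ProductCategorySmokeTests
{
    [TestMethod("Sketch: product resolves its category reference")]
    public async Task ProductResolvesItsCategory()
    {
        // Assumes `supabase start` is running and that GetLocalClient() points at
        // http://localhost:54321/rest/v1 with the local anon key, as set up above.
        var client = Helpers.GetLocalClient();

        var product = await client.Table<Product>()
            .Filter("id", Operator.Equals, "8b8e89a0-63c7-4917-8dc1-7797dc0285f1")
            .Single();

        Assert.IsNotNull(product);
        Assert.AreEqual("product 1", product!.Name);
        Assert.AreEqual("category 1", product.Category?.Name);
    }
}
```

Running `supabase start` followed by `dotnet test --no-restore` locally mirrors what the updated build-and-test workflow now does in CI.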