diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..a87a3ef --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,39 @@ +name: ci + +on: + push: + branches: [ main ] + pull_request: + branches: [ main ] + +jobs: + build: + runs-on: ubuntu-latest + env: + GHCR_USERNAME: ${{ github.actor }} + GHCR_TOKEN: ${{ secrets.GHCR_TOKEN }} + FORCE_COLOR: 1 + steps: + - uses: actions/checkout@v3 + - name: Put back the git branch into git (Earthly uses it for tagging) + run: | + branch="" + if [ -n "$GITHUB_HEAD_REF" ]; then + branch="$GITHUB_HEAD_REF" + else + branch="${GITHUB_REF##*/}" + fi + git checkout -b "$branch" || true + - name: Docker Login + run: docker login https://ghcr.io --username "$GHCR_USERNAME" --password "$GHCR_TOKEN" + - name: Download latest earthly + run: "sudo /bin/sh -c 'wget https://github.com/earthly/earthly/releases/latest/download/earthly-linux-amd64 -O /usr/local/bin/earthly && chmod +x /usr/local/bin/earthly'" + + - name: Earthly version + run: earthly --version + + - name: Run CI + run: earthly -P +ci + + - name: Run Tests + run: earthly -P +test diff --git a/README.md b/README.md index 82f7449..33d405b 100644 --- a/README.md +++ b/README.md @@ -1,24 +1,6 @@ -# Supabase Elixir SDK +# Supabase Potion -[![Hex.pm](https://img.shields.io/hexpm/v/supabase_potion.svg)](https://hex.pm/packages/supabase_potion) -[![Downloads](https://img.shields.io/hexpm/dt/supabase_potion.svg)](https://hex.pm/packages/supabase_potion) -[![Documentation](https://img.shields.io/badge/documentation-gray)](https://hexdocs.pm/supabase_potion) -[![lint](https://github.com/zoedsoupe/supabase/actions/workflows/lint.yml/badge.svg)](https://github.com/zoedsoupe/supabase/actions/workflows/lint.yml) -[![test](https://github.com/zoedsoupe/supabase/actions/workflows/test.yml/badge.svg)](https://github.com/zoedsoupe/supabase/actions/workflows/test.yml) - -> Complete SDK and APIs integrations with Supabase - -This monorepo houses the collection of Elixir SDK packages for integrating with Supabase, the open-source Firebase alternative. Our goal is to offer developers a seamless integration experience with Supabase services using Elixir. - -## Packages Overview - -- **Supabase Potion**: Main entrypoint for the Supabase SDK library, providing easy management for Supabase clients and connections. [Guide](./apps/supabase_potion/README.md). -- **Supabase Storage**: Offers developers a way to store large objects like images, videos, and other files. [Guide](./guides/storage.md) -- **Supabase PostgREST**: Directly turns your PostgreSQL database into a RESTful API using PostgREST. [Guide](#) -- **Supabase Realtime**: Provides a realtime websocket API, enabling listening to database changes. [Guide](#) -- **Supabase Auth**: A comprehensive user authentication system, complete with email sign-in, password recovery, session management, and more. [Guide](./apps/supabase_auth/README.md) -- **Supabase UI**: UI components to help build Supabase-powered applications quickly. [Guide](#) -- **Supabase Fetcher**: Customized HTTP client for making requests to Supabase APIs. [Guide](./guides/fetcher.md) +Where the magic starts! ## Getting Started @@ -34,89 +16,81 @@ def deps do end ``` -Then you can each package for the service you want to use. 
For example, if you want to use only the `Storage` integration: - -```elixir -def deps do - [ - {:supabase_potion, "~> 0.3"}, - {:supabase_storage, "~> 0.2"} - ] -end -``` - -## Starting a Client - -```elixir -iex> Supabase.Client.init_client!(%{conn: %{base_url: "", api_key: ""}, name: MyClient}) -{:ok, #PID<0.123.0>} -``` - -For more information, refer to the [supabase_potion](./apps/supabase_potion/README.md) readme! - -## Supabase Services - -The Supabase Elixir SDK allows you to interact with various Supabase services: - -### Supabase Storage - -Supabase Storage is a service for storing large objects like images, videos, and other files. It provides a simple API with strong consistency, similar to AWS S3. - -### Supabase PostgREST - -PostgREST is a web server that turns your PostgreSQL database into a RESTful API. It automatically generates API endpoints and operations based on your database's structure and permissions. - -### Supabase Realtime +### Clients -Supabase Realtime offers a realtime WebSocket API powered by PostgreSQL notifications. You can use it to listen to changes in your database and receive updates instantly as they happen. +A `Supabase.Client` is an Agent that holds general information about Supabase and can be used to interact with any of the child integrations, for example, `Supabase.Storage` or `Supabase.UI`. -### Supabase Auth +`Supabase.Client` is defined as: -Supabase Auth is a comprehensive user authentication system that includes features like email and password sign-in, email verification, password recovery, session management, and more, out of the box. +- `:name` - the name of the client, started by `start_link/1` +- `:conn` - connection information, the only required option as it is vital to the `Supabase.Client`. + - `:base_url` - The base URL of the Supabase API, usually in the form `https://.supabase.io`. + - `:api_key` - The API key used to authenticate requests to the Supabase API. + - `:access_token` - Token with specific permissions to access the Supabase API; it is usually the same as the API key. +- `:db` - default database options + - `:schema` - default schema to use, defaults to `"public"` +- `:global` - global options config + - `:headers` - additional headers to use on each request +- `:auth` - authentication options + - `:auto_refresh_token` - automatically refresh the token when it expires, defaults to `true` + - `:debug` - enable debug mode, defaults to `false` + - `:detect_session_in_url` - detect session in URL, defaults to `true` + - `:flow_type` - authentication flow type, defaults to `"web"` + - `:persist_session` - persist session, defaults to `true` + - `:storage` - storage type + - `:storage_key` - storage key -### Supabase UI +## Usage -Supabase UI provides a set of UI components to help you build Supabase-powered applications quickly. It's built on top of Tailwind CSS and Headless UI, and it's fully customizable. The package even includes `Phoenix.LiveView` components! +The Supabase Elixir SDK provides a flexible way to manage `Supabase.Client` instances. Here's a brief overview of the key concepts: -### Supabase Fetcher +### Starting a Client -Supabase Fetcher is a customized HTTP client for Supabase, mainly used in Supabase Potion. It gives you complete control over how you make requests to any Supabase API. - -## General Roadmap - -If you want to track integration-specific roadmaps, check their own README.
- -- [x] Fetcher to interact with the Supabase API in a low-level way -- [x] Supabase Storage integration -- [ ] Supabase UI for Phoenix Live View -- [ ] Supabase Postgrest integration -- [x] Supabase Auth integration -- [ ] Supabase Realtime API integration +You can start a client using the `Supabase.Client.start_link/1` function. However, it's recommended to use `Supabase.init_client!/1`, which allows you to pass client options and automatically manage `Supabase.Client` processes. +```elixir +iex> Supabase.Client.init_client!(%{conn: %{base_url: "", api_key: ""}}) +{:ok, #PID<0.123.0>} +``` - -## Why another Supabase package? +## Configuration -Well, I tried to to use the [supabase-elixir](https://github.com/treebee/supabase-elixir) package but I had some strange behaviour and it didn't match some requirements of my project. So I started to search about Elixir-Supabase integrations and found some old, non-maintained packages that doesn't match some Elixir "idioms" and don't leverage the BEAM for a more integrated experience. +Ensure your Supabase configurations are set: -Also I would like to contribute to OSS in some way and gain more experience with the BEAM and HTTP integrations too. So feel free to not to use, give some counter arguments and also contribute to these packages! +```elixir +import Config -## Credits & Inspirations +config :supabase, +  manage_clients: false, +  supabase_base_url: System.fetch_env!("SUPABASE_BASE_URL"), +  supabase_api_key: System.fetch_env!("SUPABASE_API_KEY") +``` -- [supabase-elixir](https://github.com/treebee/supabase-elixir) -- [storage-js](https://github.com/supabase/storage-js) +- `supabase_base_url`: The base URL of your Supabase project! More information on how to find it is in the [next section](#how-to-find-my-supabase-base-url?) +- `supabase_api_key`: The secret API key of your Supabase project! More information on how to find it is in the [next section](#how-to-find-my-supabase-api-key?) +- `manage_clients`: Enable or disable the internal client management by the library. Defaults to `true`; see more in the [library supervisor documentation](https://hexdocs.pm/supabase_potion/Supabase.ClientSupervisor.html). -## Contributing +Make sure to set the environment variables `SUPABASE_BASE_URL` and `SUPABASE_API_KEY`. -Contributions, issues, and feature requests are welcome! For major changes, please open an issue first to discuss what you would like to change. +### How to find my Supabase base URL? -## Acknowledgements +You can find your Supabase base URL in the Settings page of your project. +First, select your project from the initial Dashboard. +On the left sidebar, click on the Settings icon, then select API. +The base URL is the first field on the page. -This SDK is a comprehensive representation of Supabase's client integrations. Thanks to the Supabase community for their support and collaboration. +### How to find my Supabase API Key? -## License +You can find your Supabase API key in the Settings page of your project. +First, select your project from the initial Dashboard. +On the left sidebar, click on the Settings icon, then select API. +The API key is the second field on the page. -[MIT](LICENSE) +There are two types of API keys, the public and the private. The latter +bypasses any Row Level Security (RLS) rules you have set up. +So you shouldn't use it in your frontend application.
---- +If you don't know what RLS is, you can read more about it here: +https://supabase.com/docs/guides/auth/row-level-security -With the Supabase Elixir SDK, you have the tools you need to supercharge your Elixir applications by seamlessly integrating them with Supabase's powerful cloud services. Happy coding! 😄 +For most cases you should prefer to use the public "anon" Key. diff --git a/apps/supabase_auth/.formatter.exs b/apps/supabase_auth/.formatter.exs deleted file mode 100644 index d2cda26..0000000 --- a/apps/supabase_auth/.formatter.exs +++ /dev/null @@ -1,4 +0,0 @@ -# Used by "mix format" -[ - inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"] -] diff --git a/apps/supabase_auth/.gitignore b/apps/supabase_auth/.gitignore deleted file mode 100644 index ea0e15f..0000000 --- a/apps/supabase_auth/.gitignore +++ /dev/null @@ -1,40 +0,0 @@ -# The directory Mix will write compiled artifacts to. -/_build/ - -# If you run "mix test --cover", coverage assets end up here. -/cover/ - -# The directory Mix downloads your dependencies sources to. -/deps/ - -# Where third-party dependencies like ExDoc output generated docs. -/doc/ - -# Ignore .fetch files in case you like to edit your project deps locally. -/.fetch - -# If the VM crashes, it generates a dump, let's ignore it too. -erl_crash.dump - -# Also ignore archive artifacts (built via "mix archive.build"). -*.ez - -# Temporary files, for example, from tests. -/tmp/ - -# Nix files -/.nix-mix/ -/.postgres/ -result - -# Secrets files -.env - -# LSP files -/.lexical/ - -# Nix files -result - -/.elixir_ls/ -/.elixir-tools/ diff --git a/apps/supabase_auth/LICENSE b/apps/supabase_auth/LICENSE deleted file mode 100644 index d448fa4..0000000 --- a/apps/supabase_auth/LICENSE +++ /dev/null @@ -1,13 +0,0 @@ - DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - Version 2, December 2004 - - Copyright (C) 2023 Zoey Pessanha - - Everyone is permitted to copy and distribute verbatim or modified - copies of this license document, and changing it is allowed as long - as the name is changed. - - DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - - 0. You just DO WHAT THE FUCK YOU WANT TO. 
diff --git a/apps/supabase_auth/README.md b/apps/supabase_auth/README.md deleted file mode 100644 index 126afa4..0000000 --- a/apps/supabase_auth/README.md +++ /dev/null @@ -1 +0,0 @@ -# Supabase Auth (GoTrue) diff --git a/apps/supabase_auth/lib/supabase/go_true/admin.ex b/apps/supabase_auth/lib/supabase/go_true/admin.ex deleted file mode 100644 index c975d51..0000000 --- a/apps/supabase_auth/lib/supabase/go_true/admin.ex +++ /dev/null @@ -1,126 +0,0 @@ -defmodule Supabase.GoTrue.Admin do - @moduledoc false - - import Supabase.Client, only: [is_client: 1] - - alias Supabase.Client - alias Supabase.Fetcher - alias Supabase.GoTrue.AdminHandler - alias Supabase.GoTrue.Schemas.AdminUserParams - alias Supabase.GoTrue.Schemas.GenerateLink - alias Supabase.GoTrue.Schemas.InviteUserParams - alias Supabase.GoTrue.Schemas.PaginationParams - alias Supabase.GoTrue.Session - alias Supabase.GoTrue.User - - @behaviour Supabase.GoTrue.AdminBehaviour - - @scopes ~w[global local others]a - - @impl true - def sign_out(client, %Session{} = session, scope) when is_client(client) and scope in @scopes do - with {:ok, client} <- Client.retrieve_client(client) do - case AdminHandler.sign_out(client, session.access_token, scope) do - {:ok, _} -> :ok - {:error, :not_found} -> :ok - {:error, :unauthorized} -> :ok - err -> err - end - end - end - - @impl true - def invite_user_by_email(client, email, options \\ %{}) when is_client(client) do - with {:ok, client} <- Client.retrieve_client(client), - {:ok, options} <- InviteUserParams.parse(options), - {:ok, response} <- AdminHandler.invite_user(client, email, options) do - User.parse(response) - end - end - - @impl true - def generate_link(client, attrs) when is_client(client) do - with {:ok, client} <- Client.retrieve_client(client), - {:ok, params} <- GenerateLink.parse(attrs), - {:ok, response} <- AdminHandler.generate_link(client, params) do - GenerateLink.properties(response) - end - end - - @impl true - def create_user(client, attrs) when is_client(client) do - with {:ok, client} <- Client.retrieve_client(client), - {:ok, params} <- AdminUserParams.parse(attrs), - {:ok, response} <- AdminHandler.create_user(client, params) do - User.parse(response) - end - end - - @impl true - def delete_user(client, user_id, opts \\ [should_soft_delete: false]) when is_client(client) do - with {:ok, client} <- Client.retrieve_client(client), - {:ok, _} <- AdminHandler.delete_user(client, user_id, opts) do - :ok - end - end - - @impl true - def get_user_by_id(client, user_id) when is_client(client) do - with {:ok, client} <- Client.retrieve_client(client), - {:ok, response} <- AdminHandler.get_user(client, user_id) do - User.parse(response) - end - end - - @impl true - def list_users(client, params \\ %{}) when is_client(client) do - with {:ok, client} <- Client.retrieve_client(client), - {:ok, params} <- PaginationParams.page_params(params), - {:ok, response} <- AdminHandler.list_users(client, params), - {:ok, users} <- User.parse_list(response.body["users"]) do - total = Fetcher.get_header(response, "x-total-count") - - links = - response - |> Fetcher.get_header("link", "") - |> String.split(",", trim: true) - - next = parse_next_page_count(links) - last = parse_last_page_count(links) - - attrs = %{next_page: (next != 0 && next) || nil, last_page: last, total: total} - {:ok, pagination} = PaginationParams.pagination(attrs) - - {:ok, users, pagination} - end - end - - @next_page_rg ~r/.+\?page=(\d).+rel=\"next\"/ - @last_page_rg ~r/.+\?page=(\d).+rel=\"last\"/ - - defp 
parse_next_page_count(links) do - parse_page_count(links, @next_page_rg) - end - - defp parse_last_page_count(links) do - parse_page_count(links, @last_page_rg) - end - - defp parse_page_count(links, regex) do - Enum.reduce_while(links, 0, fn link, acc -> - case Regex.run(regex, link) do - [_, page] -> {:halt, page} - _ -> {:cont, acc} - end - end) - end - - @impl true - def update_user_by_id(client, user_id, attrs) when is_client(client) do - with {:ok, client} <- Client.retrieve_client(client), - {:ok, params} <- AdminUserParams.parse(attrs), - {:ok, response} <- AdminHandler.update_user(client, user_id, params) do - User.parse(response) - end - end -end diff --git a/apps/supabase_auth/lib/supabase/go_true/admin_behaviour.ex b/apps/supabase_auth/lib/supabase/go_true/admin_behaviour.ex deleted file mode 100644 index 7161690..0000000 --- a/apps/supabase_auth/lib/supabase/go_true/admin_behaviour.ex +++ /dev/null @@ -1,22 +0,0 @@ -defmodule Supabase.GoTrue.AdminBehaviour do - @moduledoc false - - alias Supabase.Client - alias Supabase.GoTrue.Session - alias Supabase.GoTrue.User - - @type scope :: :global | :local | :others - @type invite_options :: %{data: map, redirect_to: String.t()} - - @callback sign_out(Client.client(), Session.t(), scope) :: :ok | {:error, atom} - @callback invite_user_by_email(Client.client(), email, invite_options) :: :ok | {:error, atom} - when email: String.t() - @callback generate_link(Client.client(), map) :: {:ok, String.t()} | {:error, atom} - @callback create_user(Client.client(), map) :: {:ok, User.t()} | {:error, atom} - @callback list_users(Client.client()) :: {:ok, list(User.t())} | {:error, atom} - @callback get_user_by_id(Client.client(), Ecto.UUID.t()) :: {:ok, User.t()} | {:error, atom} - @callback update_user_by_id(Client.client(), Ecto.UUID.t(), map) :: - {:ok, User.t()} | {:error, atom} - @callback delete_user(Client.client(), Ecto.UUID.t(), keyword) :: - {:ok, User.t()} | {:error, atom} -end diff --git a/apps/supabase_auth/lib/supabase/go_true/admin_handler.ex b/apps/supabase_auth/lib/supabase/go_true/admin_handler.ex deleted file mode 100644 index b856eab..0000000 --- a/apps/supabase_auth/lib/supabase/go_true/admin_handler.ex +++ /dev/null @@ -1,96 +0,0 @@ -defmodule Supabase.GoTrue.AdminHandler do - @moduledoc false - - alias Supabase.Client - alias Supabase.Fetcher - alias Supabase.GoTrue.Schemas.InviteUserParams - - @invite "/invite" - @generate_link "/admin/generate_link" - @users "/admin/users" - - defp single_user_endpoint(id) do - @users <> "/#{id}" - end - - defp sign_out(scope) do - "/logout?scope=#{scope}" - end - - def sign_out(%Client{} = client, access_token, scope) do - endpoint = Client.retrieve_auth_url(client, sign_out(scope)) - headers = Fetcher.apply_client_headers(client, access_token) - Fetcher.post(endpoint, nil, headers) - end - - def invite_user(%Client{} = client, email, %InviteUserParams{} = opts) do - headers = Fetcher.apply_client_headers(client, nil, %{"redirect_to" => opts.redirect_to}) - body = %{email: email, data: opts.data} - - client - |> Client.retrieve_auth_url(@invite) - |> Fetcher.post(body, headers) - end - - def generate_link(%Client{} = client, %{type: _, redirect_to: redirect_to} = params) do - headers = Fetcher.apply_client_headers(client, nil, %{"redirect_to" => redirect_to}) - - client - |> Client.retrieve_auth_url(@generate_link) - |> Fetcher.post(params, headers) - end - - def create_user(%Client{} = client, params) do - headers = Fetcher.apply_client_headers(client) - - client - |> 
Client.retrieve_auth_url(@users) - |> Fetcher.post(params, headers) - end - - def delete_user(%Client{} = client, id, params) do - headers = Fetcher.apply_client_headers(client) - body = %{should_soft_delete: params[:should_soft_delete] || false} - uri = single_user_endpoint(id) - - client - |> Client.retrieve_auth_url(uri) - |> Fetcher.delete(body, headers) - end - - def get_user(%Client{} = client, id) do - headers = Fetcher.apply_client_headers(client) - uri = single_user_endpoint(id) - - client - |> Client.retrieve_auth_url(uri) - |> Fetcher.delete(nil, headers) - end - - def list_users(%Client{} = client, params) do - body = %{ - page: to_string(Map.get(params, :page, 0)), - per_page: to_string(Map.get(params, :per_page, 0)) - } - - headers = Fetcher.apply_client_headers(client) - - client - |> Client.retrieve_auth_url(@users) - |> Fetcher.get(body, headers, resolve_json: false) - |> case do - {:ok, resp} when resp.status == 200 -> {:ok, Map.update!(resp, :body, &Jason.decode!/1)} - {:ok, resp} -> {:ok, Fetcher.format_response(resp)} - {:error, _} = err -> err - end - end - - def update_user(%Client{} = client, id, params) do - headers = Fetcher.apply_client_headers(client) - uri = single_user_endpoint(id) - - client - |> Client.retrieve_auth_url(uri) - |> Fetcher.put(params, headers) - end -end diff --git a/apps/supabase_auth/lib/supabase/go_true/auth.ex b/apps/supabase_auth/lib/supabase/go_true/auth.ex deleted file mode 100644 index 504b941..0000000 --- a/apps/supabase_auth/lib/supabase/go_true/auth.ex +++ /dev/null @@ -1,97 +0,0 @@ -defmodule Supabase.GoTrue.Auth do - @moduledoc false - - use Ecto.Schema - - import Ecto.Changeset - - @primary_key false - embedded_schema do - field(:url, :map) - field(:expiry_margin, :integer, default: 10) - - embeds_one :mfa, MFA, primary_key: false do - embeds_one :enroll, Enroll, primary_key: false do - field(:factor_type, Ecto.Enum, values: [:totp]) - field(:issue, :string) - field(:friendly_name, :string) - end - - embeds_one :unenroll, UnEnroll, primary_key: false do - field(:factor_id, :string) - end - - embeds_one :verify, Verify, primary_key: false do - field(:factor_id, :string) - field(:challenge_id, :string) - field(:code, :string) - end - - embeds_one :challenge, Challenge, primary_key: false do - field(:factor_id, :string) - end - - embeds_one :challenge_and_verify, ChallengeAndVerify, primary_key: false do - field(:factor_id, :string) - field(:code, :string) - end - end - - embeds_one :network_failure, NetWorkFailure, primary_key: false do - field(:max_retries, :integer, default: 10) - field(:retry_interval, :integer, default: 2) - end - end - - def parse(attrs, mfa \\ %{}) do - %__MODULE__{} - |> cast(attrs, ~w[expiry_margin]a) - |> put_assoc(:mfa, mfa, required: true) - |> cast_assoc(:network_failure, with: &network_failure_changeset/2, required: true) - end - - defp network_failure_changeset(failure, attrs) do - cast(failure, attrs, ~w[max_retries max_interval]) - end - - def parse_mfa(attrs) do - %__MODULE__.MFA{} - |> cast(attrs, []) - |> cast_assoc(:enroll, with: &enroll_changeset/2, required: true) - |> cast_assoc(:unenroll, with: &unenroll_changeset/2, required: true) - |> cast_assoc(:verify, with: &verify_changeset/2, required: true) - |> cast_assoc(:challenge, with: &challenge_changeset/2, required: true) - |> cast_assoc(:challenge_and_verify, with: &challenge_and_verify_changeset/2, required: true) - |> apply_action(:parse) - end - - defp enroll_changeset(enroll, attrs) do - enroll - |> cast(attrs, ~w[factor_type issuer 
friendly_name]a) - |> validate_required([:factor_type]) - end - - defp unenroll_changeset(unenroll, attrs) do - unenroll - |> cast(attrs, [:factor_id]) - |> validate_required([:factor_id]) - end - - defp verify_changeset(verify, attrs) do - verify - |> cast(attrs, [:factor_id, :challenge_id, :code]) - |> validate_required([:factor_id, :challenge_id, :code]) - end - - defp challenge_changeset(challenge, attrs) do - challenge - |> cast(attrs, [:factor_id]) - |> validate_required([:factor_id]) - end - - defp challenge_and_verify_changeset(challenge, attrs) do - challenge - |> cast(attrs, [:factor_id, :code]) - |> validate_required([:factor_id, :code]) - end -end diff --git a/apps/supabase_auth/lib/supabase/go_true/pkce.ex b/apps/supabase_auth/lib/supabase/go_true/pkce.ex deleted file mode 100644 index c4cdc02..0000000 --- a/apps/supabase_auth/lib/supabase/go_true/pkce.ex +++ /dev/null @@ -1,18 +0,0 @@ -defmodule Supabase.GoTrue.PKCE do - @moduledoc false - - @verifier_length 56 - - def generate_verifier do - @verifier_length - |> :crypto.strong_rand_bytes() - |> Base.url_encode64(padding: false) - |> String.slice(0, @verifier_length) - end - - def generate_challenge(verifier) do - :sha256 - |> :crypto.hash(verifier) - |> Base.url_encode64(padding: false) - end -end diff --git a/apps/supabase_auth/lib/supabase/go_true/plug.ex b/apps/supabase_auth/lib/supabase/go_true/plug.ex deleted file mode 100644 index 060386d..0000000 --- a/apps/supabase_auth/lib/supabase/go_true/plug.ex +++ /dev/null @@ -1,61 +0,0 @@ -defmodule Supabase.GoTrue.Plug do - @moduledoc false - - import Plug.Conn - import Supabase.Client, only: [is_client: 1] - - alias Plug.Conn - - @key "supabase_gotrue_token" - - def session_active?(%Conn{} = conn) do - key = :second |> System.os_time() |> to_string() - get_session(conn, key) == nil - rescue - ArgumentError -> false - end - - def authenticated?(%Conn{} = conn) do - not is_nil(conn.private[@key]) - end - - def put_current_token(%Conn{} = conn, token) do - put_private(conn, @key, token) - end - - def put_session_token(%Conn{} = conn, token) do - conn - |> put_session(@key, token) - |> configure_session(renew: true) - end - - def sig_in(%Conn{} = conn, client, attrs) when is_client(client) do - case maybe_sign_in(conn, client, attrs) do - {:ok, session} -> put_session_token(conn, session.access_token) - _ -> conn - end - end - - defp maybe_sign_in(conn, client, credentials) do - if session_active?(conn) do - Supabase.GoTrue.sign_in_with_password(client, credentials) - end - end - - def sign_out(%Conn{} = conn) do - if session_active?(conn) do - delete_session(conn, @key) - else - conn - end - end - - def fetch_token_from_cookies(%Conn{} = conn) do - token = conn.req_cookies[@key] || conn.req_cookies[to_string(@key)] - if token, do: {:ok, token}, else: {:error, :not_found} - end - - def current_token(%Conn{} = conn) do - conn.private[@key] - end -end diff --git a/apps/supabase_auth/lib/supabase/go_true/plug/verify_header.ex b/apps/supabase_auth/lib/supabase/go_true/plug/verify_header.ex deleted file mode 100644 index 0ce426e..0000000 --- a/apps/supabase_auth/lib/supabase/go_true/plug/verify_header.ex +++ /dev/null @@ -1,24 +0,0 @@ -defmodule Supabase.GoTrue.Plug.VerifyHeader do - @moduledoc false - - import Plug.Conn - - alias Supabase.GoTrue - - @behaviour Plug - - @impl true - def init(opts \\ []), do: opts - - @impl true - def call(%Plug.Conn{} = conn, _opts) do - if GoTrue.Plug.current_token(conn) do - conn - else - case get_req_header(conn, :authorization) do - ["Bearer " <> 
token] -> GoTrue.Plug.put_current_token(conn, token) - _ -> halt(conn) - end - end - end -end diff --git a/apps/supabase_auth/lib/supabase/go_true/schemas/admin_user_params.ex b/apps/supabase_auth/lib/supabase/go_true/schemas/admin_user_params.ex deleted file mode 100644 index ef68c53..0000000 --- a/apps/supabase_auth/lib/supabase/go_true/schemas/admin_user_params.ex +++ /dev/null @@ -1,41 +0,0 @@ -defmodule Supabase.GoTrue.Schemas.AdminUserParams do - @moduledoc false - - import Ecto.Changeset - - @types %{ - app_metadata: :map, - email_confirm: :boolean, - phone_confirm: :boolean, - ban_duration: :string, - role: :string, - email: :string, - phone: :string, - password: :string, - nonce: :string - } - - def parse(attrs) do - {%{}, @types} - |> cast(attrs, Map.keys(@types)) - |> validate_required_inclusion([:email, :phone]) - |> apply_action(:parse) - end - - defp validate_required_inclusion(%{valid?: false} = c, _), do: c - - defp validate_required_inclusion(changeset, fields) do - if Enum.any?(fields, &present?(changeset, &1)) do - changeset - else - changeset - |> add_error(:email, "at least an email or phone is required") - |> add_error(:phone, "at least an email or phone is required") - end - end - - defp present?(changeset, field) do - value = get_change(changeset, field) - value && value != "" - end -end diff --git a/apps/supabase_auth/lib/supabase/go_true/schemas/generate_link.ex b/apps/supabase_auth/lib/supabase/go_true/schemas/generate_link.ex deleted file mode 100644 index 9c18fbf..0000000 --- a/apps/supabase_auth/lib/supabase/go_true/schemas/generate_link.ex +++ /dev/null @@ -1,119 +0,0 @@ -defmodule Supabase.GoTrue.Schemas.GenerateLink do - @moduledoc false - - import Ecto.Changeset - - @types ~w[signup invite magicLink recovery email_change_current email_change_new]a - - @options_types %{data: :map, redirect_to: :string} - - @base_types %{ - email: :string, - type: Ecto.ParameterizedType.init(Ecto.Enum, values: @types) - } - - @properties_types %{ - action_link: :string, - email_otp: :string, - hashed_token: :string, - redirect_to: :string, - verification_type: Ecto.ParameterizedType.init(Ecto.Enum, values: @types) - } - - def properties(attrs) do - {%{}, @properties_types} - |> cast(attrs, Map.keys(@properties_types)) - |> validate_required(Map.keys(@properties_types)) - |> apply_action(:parse) - end - - def parse(attrs) do - [ - &sign_up_params/1, - &invite_or_magic_link_params/1, - &recovery_params/1, - &email_change_params/1 - ] - |> Enum.reduce_while(nil, fn schema, _ -> - case result = schema.(attrs) do - {:ok, _} -> {:halt, result} - {:error, _} -> {:cont, result} - end - end) - end - - def sign_up_params(attrs) do - types = with_options(%{password: :string}) - - {%{}, types} - |> cast(attrs, Map.keys(types)) - |> validate_required([:email, :password, :type]) - |> validate_redirect_to() - |> validate_change(:type, fn _, type -> - check_type(type, :signup) - end) - |> apply_action(:parse) - end - - def invite_or_magic_link_params(attrs) do - types = with_options() - - {%{}, types} - |> cast(attrs, Map.keys(types) -- [:data]) - |> validate_required([:email, :type]) - |> validate_redirect_to() - |> validate_inclusion(:type, ~w[invite magicLink]a) - |> apply_action(:parse) - end - - def recovery_params(attrs) do - types = with_options() - - {%{}, types} - |> cast(attrs, Map.keys(types) -- [:data]) - |> validate_redirect_to() - |> validate_change(:type, fn _, type -> - check_type(type, :recovery) - end) - |> validate_required([:email, :type]) - |> apply_action(:parse) - end 
- - def email_change_params(attrs) do - types = with_options() - - {%{}, types} - |> cast(attrs, Map.keys(types) -- [:data]) - |> validate_required([:email, :type]) - |> validate_redirect_to() - |> validate_inclusion(:type, ~w[email_change_current email_change_new]a) - |> apply_action(:parse) - end - - defp with_options(types \\ %{}) do - @base_types - |> Map.merge(types) - |> Map.merge(@options_types) - end - - defp check_type(current, desired) do - if current == desired do - [] - else - [type: "need to be #{desired} for this schema"] - end - end - - defp validate_redirect_to(%{valid?: false} = changeset), do: changeset - - defp validate_redirect_to(changeset) do - if redirect_to = get_change(changeset, :redirect_to) do - case URI.new(redirect_to) do - {:ok, uri} -> put_change(changeset, :redirect_to, URI.to_string(uri)) - {:error, error} -> add_error(changeset, :redirect_to, error) - end - else - changeset - end - end -end diff --git a/apps/supabase_auth/lib/supabase/go_true/schemas/invite_user_params.ex b/apps/supabase_auth/lib/supabase/go_true/schemas/invite_user_params.ex deleted file mode 100644 index 9831f2e..0000000 --- a/apps/supabase_auth/lib/supabase/go_true/schemas/invite_user_params.ex +++ /dev/null @@ -1,35 +0,0 @@ -defmodule Supabase.GoTrue.Schemas.InviteUserParams do - @moduledoc false - - use Supabase, :schema - - @type t :: %__MODULE__{ - data: map, - redirect_to: URI.t() - } - - embedded_schema do - field(:data, :map) - field(:redirect_to, :map) - end - - def parse(attrs) do - %__MODULE__{} - |> cast(attrs, [:data, :redirect_to]) - |> parse_uri() - |> apply_action(:parse) - end - - defp parse_uri(changeset) do - redirect_to = get_change(changeset, :redirect_to) - - if redirect_to do - case URI.new(redirect_to) do - {:ok, uri} -> put_change(changeset, :redirect_to, uri) - {:error, reason} -> add_error(changeset, :redirect_to, "Invalid URI: #{reason}") - end - else - changeset - end - end -end diff --git a/apps/supabase_auth/lib/supabase/go_true/schemas/pagination_params.ex b/apps/supabase_auth/lib/supabase/go_true/schemas/pagination_params.ex deleted file mode 100644 index b5e6ecb..0000000 --- a/apps/supabase_auth/lib/supabase/go_true/schemas/pagination_params.ex +++ /dev/null @@ -1,24 +0,0 @@ -defmodule Supabase.GoTrue.Schemas.PaginationParams do - @moduledoc false - - use Ecto.Schema - - import Ecto.Changeset - - def page_params(attrs) do - schema = %{page: :integer, per_page: :integer} - - {%{}, schema} - |> cast(attrs, Map.keys(schema)) - |> apply_action(:parse) - end - - def pagination(attrs) do - schema = %{next_page: :integer, last_page: :integer, total: :integer} - - {%{}, schema} - |> cast(attrs, Map.keys(schema)) - |> validate_required([:total, :last_page]) - |> apply_action(:parse) - end -end diff --git a/apps/supabase_auth/lib/supabase/go_true/schemas/sign_in_with_password.ex b/apps/supabase_auth/lib/supabase/go_true/schemas/sign_in_with_password.ex deleted file mode 100644 index cae15c8..0000000 --- a/apps/supabase_auth/lib/supabase/go_true/schemas/sign_in_with_password.ex +++ /dev/null @@ -1,46 +0,0 @@ -defmodule Supabase.GoTrue.Schemas.SignInWithPassword do - @moduledoc false - - use Ecto.Schema - - import Ecto.Changeset - - @primary_key false - embedded_schema do - field(:email, :string) - field(:phone, :string) - field(:password, :string) - - embeds_one :options, Options, primary_key: false do - field(:data, :map) - field(:captcha_token, :string) - end - end - - def to_sign_in_params(%__MODULE__{} = signin) do - Map.take(signin, [:email, :phone, 
:password]) - end - - def parse(attrs) do - %__MODULE__{} - |> cast(attrs, ~w[email phone password]a) - |> cast_embed(:options, with: &options_changeset/2, required: false) - |> validate_required([:password]) - |> maybe_put_default_options() - |> apply_action(:parse) - end - - defp maybe_put_default_options(%{valid?: false} = c), do: c - - defp maybe_put_default_options(changeset) do - if get_embed(changeset, :options) do - changeset - else - put_embed(changeset, :options, %__MODULE__.Options{}) - end - end - - defp options_changeset(options, attrs) do - cast(options, attrs, ~w[email_redirect_to data captcha_token]a) - end -end diff --git a/apps/supabase_auth/lib/supabase/go_true/schemas/sign_up_request.ex b/apps/supabase_auth/lib/supabase/go_true/schemas/sign_up_request.ex deleted file mode 100644 index 035a24d..0000000 --- a/apps/supabase_auth/lib/supabase/go_true/schemas/sign_up_request.ex +++ /dev/null @@ -1,50 +0,0 @@ -defmodule Supabase.GoTrue.Schemas.SignUpRequest do - @moduledoc false - - use Ecto.Schema - - import Ecto.Changeset - - alias Supabase.GoTrue.Schemas.SignUpWithPassword - - @required_fields ~w[password]a - @optional_fields ~w[email phone data code_challenge code_challenge_method]a - - @derive Jason.Encoder - @primary_key false - embedded_schema do - field(:email, :string) - field(:phone, :string) - field(:password, :string) - field(:data, :map, default: %{}) - field(:code_challenge, :string) - field(:code_challenge_method, :string) - - embeds_one :gotrue_meta_security, GoTrueMetaSecurity, primary_key: false do - @derive Jason.Encoder - field(:captcha_token, :string) - end - end - - def changeset(signup \\ %__MODULE__{}, attrs, go_true_meta) do - signup - |> cast(attrs, @required_fields ++ @optional_fields) - |> put_embed(:gotrue_meta_security, go_true_meta) - |> validate_required(@required_fields) - |> apply_action(:insert) - end - - def create(%SignUpWithPassword{} = signup) do - attrs = SignUpWithPassword.to_sign_up_params(signup) - go_true_meta = %__MODULE__.GoTrueMetaSecurity{captcha_token: signup.options.captcha_token} - - changeset(attrs, go_true_meta) - end - - def create(%SignUpWithPassword{} = signup, code_challenge, code_method) do - attrs = SignUpWithPassword.to_sign_up_params(signup, code_challenge, code_method) - go_true_meta = %__MODULE__.GoTrueMetaSecurity{captcha_token: signup.options.captcha_token} - - changeset(attrs, go_true_meta) - end -end diff --git a/apps/supabase_auth/lib/supabase/go_true/schemas/sign_up_with_password.ex b/apps/supabase_auth/lib/supabase/go_true/schemas/sign_up_with_password.ex deleted file mode 100644 index 1482765..0000000 --- a/apps/supabase_auth/lib/supabase/go_true/schemas/sign_up_with_password.ex +++ /dev/null @@ -1,98 +0,0 @@ -defmodule Supabase.GoTrue.Schemas.SignUpWithPassword do - @moduledoc false - - use Ecto.Schema - - import Ecto.Changeset - - @type options :: %__MODULE__.Options{ - email_redirect_to: URI.t() | nil, - data: map | nil, - captcha_token: String.t() | nil - } - - @type t :: %__MODULE__{ - email: String.t() | nil, - password: String.t(), - phone: String.t() | nil, - options: list(options) | nil - } - - @derive Jason.Encoder - @primary_key false - embedded_schema do - field(:email, :string) - field(:password, :string) - field(:phone, :string) - - embeds_one :options, Options, primary_key: false do - field(:email_redirect_to, :map) - field(:data, :map) - field(:captcha_token, :string) - end - end - - def to_sign_up_params(%__MODULE__{} = signup) do - Map.take(signup, [:email, :password, :phone]) - end - - 
def to_sign_up_params(%__MODULE__{} = signup, code_challenge, code_method) do - signup - |> to_sign_up_params() - |> Map.merge(%{code_challange: code_challenge, code_challenge_method: code_method}) - end - - @spec validate(map) :: Ecto.Changeset.t() - def validate(attrs) do - %__MODULE__{} - |> cast(attrs, [:email, :password, :phone]) - |> cast_embed(:options, with: &options_changeset/2, required: false) - |> maybe_put_default_options() - |> validate_email_or_phone() - |> validate_required([:password]) - end - - defp maybe_put_default_options(%{valid?: false} = c), do: c - - defp maybe_put_default_options(changeset) do - if get_embed(changeset, :options) do - changeset - else - put_embed(changeset, :options, %__MODULE__.Options{}) - end - end - - defp options_changeset(options, attrs) do - cast(options, attrs, ~w[email_redirect_to data captcha_token]a) - end - - defp validate_email_or_phone(changeset) do - email = get_change(changeset, :email) - phone = get_change(changeset, :phone) - - case {email, phone} do - {nil, nil} -> - changeset - |> add_error(:email, "or phone can't be blank") - |> add_error(:phone, "or email can't be blank") - - {email, nil} when is_binary(email) -> - changeset - - {nil, phone} when is_binary(phone) -> - changeset - - {email, phone} when is_binary(email) and is_binary(phone) -> - changeset - |> add_error(:email, "can't be given with phone") - |> add_error(:phone, "can't be given with email") - end - end - - @spec parse(map) :: {:ok, t} | {:error, Ecto.Changeset.t()} - def parse(attrs) do - attrs - |> validate() - |> apply_action(:parse) - end -end diff --git a/apps/supabase_auth/lib/supabase/go_true/session.ex b/apps/supabase_auth/lib/supabase/go_true/session.ex deleted file mode 100644 index 4f19351..0000000 --- a/apps/supabase_auth/lib/supabase/go_true/session.ex +++ /dev/null @@ -1,46 +0,0 @@ -defmodule Supabase.GoTrue.Session do - @moduledoc false - - use Ecto.Schema - - import Ecto.Changeset - - alias Supabase.GoTrue.User - - @type t :: %__MODULE__{ - provider_token: String.t() | nil, - provider_refresh_token: String.t() | nil, - access_token: String.t(), - refresh_token: String.t(), - expires_in: integer, - expires_at: NaiveDateTime.t() | nil, - token_type: String.t(), - user: User.t() - } - - @required_fields ~w[access_token refresh_token expires_in token_type]a - @optional_fields ~w[provider_token provider_refresh_token expires_at]a - - @derive Jason.Encoder - @primary_key false - embedded_schema do - field(:provider_token, :string) - field(:provider_refresh_token, :string) - field(:access_token, :string) - field(:refresh_token, :string) - field(:expires_in, :integer) - field(:expires_at, :integer) - field(:token_type, :string) - - embeds_one(:user, User) - end - - @spec parse(map) :: {:ok, t} | {:error, Ecto.Changeset.t()} - def parse(attrs) do - %__MODULE__{} - |> cast(attrs, @required_fields ++ @optional_fields) - |> validate_required(@required_fields) - |> cast_embed(:user, required: false) - |> apply_action(:parse) - end -end diff --git a/apps/supabase_auth/lib/supabase/go_true/user.ex b/apps/supabase_auth/lib/supabase/go_true/user.ex deleted file mode 100644 index cefe90e..0000000 --- a/apps/supabase_auth/lib/supabase/go_true/user.ex +++ /dev/null @@ -1,98 +0,0 @@ -defmodule Supabase.GoTrue.User do - @moduledoc false - - use Ecto.Schema - - import Ecto.Changeset - - alias Supabase.GoTrue.User.Factor - alias Supabase.GoTrue.User.Identity - - @type t :: %__MODULE__{ - id: Ecto.UUID.t(), - app_metadata: map, - user_metadata: map, - aud: String.t(), - 
confirmation_sent_at: NaiveDateTime.t() | nil, - recovery_sent_at: NaiveDateTime.t() | nil, - email_change_sent_at: NaiveDateTime.t() | nil, - new_email: String.t() | nil, - new_phone: String.t() | nil, - invited_at: NaiveDateTime.t() | nil, - action_link: String.t() | nil, - email: String.t() | nil, - phone: String.t() | nil, - created_at: NaiveDateTime.t(), - confirmed_at: NaiveDateTime.t() | nil, - email_confirmed_at: NaiveDateTime.t() | nil, - phone_confirmed_at: NaiveDateTime.t() | nil, - last_sign_in_at: NaiveDateTime.t() | nil, - role: String.t() | nil, - updated_at: NaiveDateTime.t() | nil, - identities: list(Identity) | nil, - factors: list(Factor) | nil - } - - @required_fields ~w[id app_metadata app_metadata aud created_at]a - @optional_fields ~w[confirmation_sent_at recovery_sent_at email_change_sent_at new_email new_phone invited_at action_link email phone confirmed_at email_confirmed_at phone_confirmed_at last_sign_in_at role]a - - @primary_key {:id, :binary_id, autogenerate: false} - embedded_schema do - field(:app_metadata, :map) - field(:user_metadata, :map) - field(:aud, :string) - field(:confirmation_sent_at, :naive_datetime) - field(:recovery_sent_at, :naive_datetime) - field(:email_change_sent_at, :naive_datetime) - field(:new_email, :string) - field(:new_phone, :string) - field(:invited_at, :naive_datetime) - field(:action_link, :string) - field(:email, :string) - field(:phone, :string) - field(:confirmed_at, :naive_datetime) - field(:email_confirmed_at, :naive_datetime) - field(:phone_confirmed_at, :naive_datetime) - field(:last_sign_in_at, :naive_datetime) - field(:role, :string) - - embeds_many(:factors, Supabase.GoTrue.User.Factor) - embeds_many(:identities, Supabase.GoTrue.User.Identity) - - timestamps(inserted_at: :created_at) - end - - def changeset(user \\ %__MODULE__{}, attrs) do - user - |> cast(attrs, @required_fields ++ @optional_fields) - |> validate_required(@required_fields) - |> cast_embed(:identities, required: true) - |> cast_embed(:factors, required: false) - end - - def multiple_changeset(user \\ %__MODULE__{}, attrs) do - user - |> cast(attrs, @required_fields ++ @optional_fields) - |> validate_required(@required_fields) - end - - def parse(attrs) do - attrs - |> changeset() - |> apply_action(:parse) - end - - def parse_list(list_attrs) do - results = - Enum.reduce_while(list_attrs, [], fn attrs, acc -> - changeset = multiple_changeset(attrs) - - case result = apply_action(changeset, :parse) do - {:ok, user} -> {:cont, [user | acc]} - {:error, _} -> {:halt, result} - end - end) - - if is_list(results), do: {:ok, results}, else: results - end -end diff --git a/apps/supabase_auth/lib/supabase/go_true/user/factor.ex b/apps/supabase_auth/lib/supabase/go_true/user/factor.ex deleted file mode 100644 index 04cc512..0000000 --- a/apps/supabase_auth/lib/supabase/go_true/user/factor.ex +++ /dev/null @@ -1,32 +0,0 @@ -defmodule Supabase.GoTrue.User.Factor do - @moduledoc false - - use Ecto.Schema - - import Ecto.Changeset - - @type t :: %__MODULE__{ - id: Ecto.UUID.t(), - friendly_name: String.t() | nil, - factor_type: :totp, - status: :verified | :unverified, - created_at: NaiveDateTime.t(), - updated_at: NaiveDateTime.t() - } - - @derive Jason.Encoder - @primary_key {:id, :binary_id, autogenerate: false} - embedded_schema do - field(:friendly_name, :string) - field(:factor_type, Ecto.Enum, values: ~w[totp]a) - field(:status, Ecto.Enum, values: ~w[verified unverified]a) - - timestamps(inserted_at: :created_at) - end - - def changeset(factor \\ 
%__MODULE__{}, attrs) do - factor - |> cast(attrs, ~w[id friendly_name factor_type status created_at updated_at]a) - |> validate_required(~w[id factor_type status created_at updated_at]a) - end -end diff --git a/apps/supabase_auth/mix.exs b/apps/supabase_auth/mix.exs deleted file mode 100644 index 62dae3b..0000000 --- a/apps/supabase_auth/mix.exs +++ /dev/null @@ -1,67 +0,0 @@ -defmodule SupabaseAuth.MixProject do - use Mix.Project - - @version "0.1.0" - @source_url "https://github.com/zoedsoupe/supabase" - - def project do - [ - app: :supabase_auth, - version: @version, - build_path: "../../_build", - deps_path: "../../deps", - lockfile: "../../mix.lock", - elixir: "~> 1.15", - start_permanent: Mix.env() == :prod, - deps: deps(), - docs: docs(), - package: package(), - description: description() - ] - end - - # Run "mix help compile.app" to learn about applications. - def application do - [ - extra_applications: [:logger] - ] - end - - # Run "mix help deps" to learn about dependencies. - defp deps do - [ - {:plug, "~> 1.15"}, - {:supabase_potion, umbrella_dep(Mix.env())}, - {:ex_doc, ">= 0.0.0", only: [:dev, :prod], runtime: false} - ] - end - - defp umbrella_dep(e) when e in [:dev, :test], do: [in_umbrella: true] - defp umbrella_dep(:prod), do: "~> 0.2" - - defp package do - %{ - licenses: ["MIT"], - contributors: ["zoedsoupe"], - links: %{ - "GitHub" => @source_url, - "Docs" => "https://hexdocs.pm/supabase_auth" - }, - files: ~w[lib mix.exs README.md LICENSE] - } - end - - defp docs do - [ - main: "Supabase.GoTrue", - extras: ["README.md"] - ] - end - - defp description do - """ - Integration with the GoTrue API from Supabase services. - Provide authentication with MFA, password and magic link. - """ - end -end diff --git a/apps/supabase_postgrest/.formatter.exs b/apps/supabase_postgrest/.formatter.exs deleted file mode 100644 index d2cda26..0000000 --- a/apps/supabase_postgrest/.formatter.exs +++ /dev/null @@ -1,4 +0,0 @@ -# Used by "mix format" -[ - inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"] -] diff --git a/apps/supabase_postgrest/.gitignore b/apps/supabase_postgrest/.gitignore deleted file mode 100644 index ea0e15f..0000000 --- a/apps/supabase_postgrest/.gitignore +++ /dev/null @@ -1,40 +0,0 @@ -# The directory Mix will write compiled artifacts to. -/_build/ - -# If you run "mix test --cover", coverage assets end up here. -/cover/ - -# The directory Mix downloads your dependencies sources to. -/deps/ - -# Where third-party dependencies like ExDoc output generated docs. -/doc/ - -# Ignore .fetch files in case you like to edit your project deps locally. -/.fetch - -# If the VM crashes, it generates a dump, let's ignore it too. -erl_crash.dump - -# Also ignore archive artifacts (built via "mix archive.build"). -*.ez - -# Temporary files, for example, from tests. 
-/tmp/ - -# Nix files -/.nix-mix/ -/.postgres/ -result - -# Secrets files -.env - -# LSP files -/.lexical/ - -# Nix files -result - -/.elixir_ls/ -/.elixir-tools/ diff --git a/apps/supabase_postgrest/README.md b/apps/supabase_postgrest/README.md deleted file mode 100644 index 714541b..0000000 --- a/apps/supabase_postgrest/README.md +++ /dev/null @@ -1,21 +0,0 @@ -# Supabase.PostgREST - -**TODO: Add description** - -## Installation - -If [available in Hex](https://hex.pm/docs/publish), the package can be installed -by adding `supabase_postgrest` to your list of dependencies in `mix.exs`: - -```elixir -def deps do - [ - {:supabase_postgrest, "~> 0.1.0"} - ] -end -``` - -Documentation can be generated with [ExDoc](https://github.com/elixir-lang/ex_doc) -and published on [HexDocs](https://hexdocs.pm). Once published, the docs can -be found at . - diff --git a/apps/supabase_postgrest/lib/supabase/postgrest.ex b/apps/supabase_postgrest/lib/supabase/postgrest.ex deleted file mode 100644 index bf8b07d..0000000 --- a/apps/supabase_postgrest/lib/supabase/postgrest.ex +++ /dev/null @@ -1,128 +0,0 @@ -defmodule Supabase.PostgREST do - @moduledoc false - - use Ecto.Schema - import Ecto.Changeset - - alias Supabase.Client - alias Supabase.Fetcher - - @type t :: %__MODULE__{ - path: URI.t(), - headers: map, - schema: String.t(), - method: :get | :post | :put | :delete, - body: map, - params: list(String.t()) - } - - @default_path "/rest/v1" - - @default_headers %{ - "accept" => "application/json", - "content-type" => "application/json", - "content-profile" => "$schema", - "accept-profile" => "$schema" - } - - @primary_key false - embedded_schema do - field(:host, :map, default: URI.new!("http://localhost:3000")) - field(:path, :map, default: URI.new!(@default_path)) - field(:headers, {:map, :string}) - field(:schema, :string, default: "public") - field(:method, Ecto.Enum, values: ~w[get post put delete]a, default: :get) - field(:body, :map) - field(:params, {:array, :string}) - - field(:url, :string, virtual: true) - end - - def to_map(%__MODULE__{} = rest) do - rest - |> Map.from_struct() - |> Map.put(:url, URI.to_string(URI.merge(rest.host, rest.path))) - |> Map.update(:headers, rest.headers, &Map.to_list/1) - end - - @doc false - def changeset(rest \\ %__MODULE__{}, attrs) do - rest - |> cast(attrs, [:host, :path, :headers, :schema, :method, :body, :params]) - |> maybe_merge_headers() - |> validate_required([:method, :headers]) - |> apply_action(:parse) - end - - defp maybe_merge_headers(%{valid?: false} = changeset), do: changeset - - defp maybe_merge_headers(changeset) do - headers = get_change(changeset, :headers) - schema = get_change(changeset, :schema) || get_field(changeset, :schema) - - merged = - @default_headers - |> Map.replace("content-profile", schema) - |> Map.replace("accept-profile", schema) - |> Map.merge(headers || %{}) - - put_change(changeset, :headers, merged) - end - - @spec from_supabase_client(Client.t()) :: {:ok, __MODULE__.t()} | {:error, Ecto.Changeset.t()} - def from_supabase_client(%Client{} = client) do - with {:ok, rest} <- - changeset(%{ - schema: client.db.schema, - host: URI.new!(client.conn.base_url), - method: :get, - headers: client.global.headers - }) do - __MODULE__.schema(rest, client.db.schema) - end - end - - @spec from(__MODULE__.t(), String.t()) :: __MODULE__.t() | {:error, Ecto.Changeset.t()} - def from(%__MODULE__{} = rest, relation) do - with {:ok, rest} <- changeset(rest, %{path: URI.merge(rest.path, relation)}) do - rest - end - end - - @spec 
schema(__MODULE__.t(), String.t()) :: __MODULE__.t() | {:error, Ecto.Changeset.t()} - def schema(%__MODULE__{} = rest, schema) do - headers = %{"accept-profile" => schema, "content-profile" => schema} - - with {:ok, rest} <- changeset(rest, %{headers: headers, schema: schema, method: :get}) do - rest - end - end - - @spec call(__MODULE__.t()) :: Finch.Response.t() - def call(%__MODULE__{} = rest) do - req = to_map(rest) - - task = - Task.async(fn -> - cond do - req.method in [:get, :delete] -> - apply(Fetcher, req.method, [req.url, req.headers]) - - req.method in [:put, :post] -> - apply(Fetcher, req.method, [req.url, req.body, req.headers]) - end - end) - - Task.await(task) - end - - @spec ping(__MODULE__.t()) :: :pong | :error - def ping(%__MODULE__{} = rest) do - {:ok, temp} = changeset(rest, %{schema: "", method: :get}) - - case call(temp) do - {:ok, _result} -> :pong - _ -> :error - end - end -end diff --git a/apps/supabase_postgrest/lib/supabase/postgrest/application.ex b/apps/supabase_postgrest/lib/supabase/postgrest/application.ex deleted file mode 100644 index ff685fb..0000000 --- a/apps/supabase_postgrest/lib/supabase/postgrest/application.ex +++ /dev/null @@ -1,13 +0,0 @@ -defmodule Supabase.PostgREST.Application do - @moduledoc false - - use Application - - @impl true - def start(_type, _args) do - children = [Supabase.PostgREST.Repo, Supabase.PostgREST.EctoAdapter.Connection] - - opts = [strategy: :one_for_one, name: Supabase.PostgREST.Supervisor] - Supervisor.start_link(children, opts) - end -end diff --git a/apps/supabase_postgrest/lib/supabase/postgrest/ecto_adapter.ex b/apps/supabase_postgrest/lib/supabase/postgrest/ecto_adapter.ex deleted file mode 100644 index e0264fa..0000000 --- a/apps/supabase_postgrest/lib/supabase/postgrest/ecto_adapter.ex +++ /dev/null @@ -1,83 +0,0 @@ -defmodule Supabase.PostgREST.EctoAdapter do - @moduledoc false - - alias Supabase.PostgREST.EctoAdapter.Connection - - @behaviour Ecto.Adapter - @behaviour Ecto.Adapter.Schema - @behaviour Ecto.Adapter.Queryable - - @impl Ecto.Adapter - defmacro __before_compile__(_), do: [] - - @impl Ecto.Adapter - def checkout(_meta, _config, fun) do - fun.() - end - - @impl Ecto.Adapter - def checked_out?(_meta), do: false - - @impl Ecto.Adapter - def ensure_all_started(options, _type) do - # IO.inspect(options, label: "ENSURE SYTARTED OPTS") - Application.ensure_all_started(:supabase_potion) - Application.ensure_all_started(:supabase_postgrest) - end - - @impl Ecto.Adapter - def init(opts \\ []) do - {:ok, Connection.child_spec(opts), %{name: Connection, opts: opts}} - end - - @impl Ecto.Adapter - def dumpers(_, type), do: [type] - - @impl Ecto.Adapter - def loaders(_, type), do: [type] - - @impl Ecto.Adapter.Schema - def autogenerate(:id), do: nil - def autogenerate(:binary_id), do: Ecto.UUID.generate() - - @impl Ecto.Adapter.Schema - def insert(_adapter_meta, _schema_meta, attrs, _on_conflict, returning, _opts) do - # IO.inspect(attrs, label: "CREATE ATTRS") - # IO.inspect(returning, label: "CREATE RETURNING") - {:ok, attrs} - end - - @impl Ecto.Adapter.Schema - def update(_adapter_meta, _schema_meta, attrs, filters, returning, _opts) do - # IO.inspect(attrs, label: "UPDATE ATTRS") - # IO.inspect(filters, label: "UPDATE FILTERS") - # IO.inspect(returning, label: "UPDATE RETURNING") - {:ok, attrs} - end - - @impl Ecto.Adapter.Schema - def delete(_adapter_meta, _schema_meta, filters, _opts) do - # IO.inspect(filters, label: "DELETE FILTERS") - {:ok, []} - end - - @impl Ecto.Adapter.Queryable - def prepare(_type, 
%Ecto.Query{} = query) do - # req = PostgREST.Query.from_ecto_query(query) - {:no_cache, query} - end - - @impl Ecto.Adapter.Queryable - def execute(_adapter_meta, query_meta, _query_cache, params, _opts) do - # IO.inspect(query_meta, label: "QUERY META") - # IO.inspect(params, label: "ExECUTE PARAMS") - {1, nil} - end - - @impl Ecto.Adapter.Queryable - def stream(_adapter_meta, query_meta, _query_cache, params, _opts) do - # IO.inspect(query_meta, label: "STREAM QUERY META") - # IO.inspect(params, label: "STREAM PARAMS") - Stream.repeatedly(fn -> {1, nil} end) - end -end diff --git a/apps/supabase_postgrest/lib/supabase/postgrest/ecto_adapter/connection.ex b/apps/supabase_postgrest/lib/supabase/postgrest/ecto_adapter/connection.ex deleted file mode 100644 index 0937ebc..0000000 --- a/apps/supabase_postgrest/lib/supabase/postgrest/ecto_adapter/connection.ex +++ /dev/null @@ -1,45 +0,0 @@ -defmodule Supabase.PostgREST.EctoAdapter.Connection do - @moduledoc false - - use Agent - - alias Supabase.PostgREST - - @type opts :: [host: String.t(), schema: String.t()] - - def start_link(_opts) do - Agent.start_link( - fn -> - %PostgREST{} - end, - name: __MODULE__ - ) - |> case do - {:ok, pid} -> {:ok, pid} - {:error, {:already_started, pid}} -> {:ok, pid} - {:error, reason} -> {:error, reason} - end - end - - def update(opts) do - Agent.update(__MODULE__, fn rest -> - attrs = - opts - |> Keyword.take(~w[host schema]a) - |> Map.new() - |> Map.update!(:host, &URI.new!/1) - - rest - |> PostgREST.changeset(attrs) - |> case do - {:ok, rest} -> rest - {:error, _} -> nil - end - end) - end - - @spec apply_connection_change((PostgREST.t() -> PostgREST.t())) :: :ok - def apply_connection_change(fun) do - Agent.update(__MODULE__, fun) - end -end diff --git a/apps/supabase_postgrest/lib/supabase/postgrest/repo.ex b/apps/supabase_postgrest/lib/supabase/postgrest/repo.ex deleted file mode 100644 index 839f551..0000000 --- a/apps/supabase_postgrest/lib/supabase/postgrest/repo.ex +++ /dev/null @@ -1,5 +0,0 @@ -defmodule Supabase.PostgREST.Repo do - @moduledoc false - - use Ecto.Repo, otp_app: :supabase_postgrest, adapter: Supabase.PostgREST.EctoAdapter -end diff --git a/apps/supabase_postgrest/mix.exs b/apps/supabase_postgrest/mix.exs deleted file mode 100644 index a58ca30..0000000 --- a/apps/supabase_postgrest/mix.exs +++ /dev/null @@ -1,68 +0,0 @@ -defmodule Supabase.PostgREST.MixProject do - use Mix.Project - - @version "0.2.0" - @source_url "https://github.com/zoedsoupe/supabase_storage" - - def project do - [ - app: :supabase_postgrest, - version: @version, - elixir: "~> 1.15", - build_path: "../../_build", - deps_path: "../../deps", - lockfile: "../../mix.lock", - start_permanent: Mix.env() == :prod, - docs: docs(), - deps: deps(), - package: package(), - description: description() - ] - end - - def application do - [ - extra_applications: [:logger], - mod: {Supabase.PostgREST.Application, []} - ] - end - - defp deps do - [ - {:ecto, "~> 3.10"}, - {:supabase_potion, umbrella_dep(Mix.env())}, - {:ex_doc, ">= 0.0.0", runtime: false}, - {:credo, "~> 1.7", only: [:dev, :test], runtime: false}, - {:dialyxir, "~> 1.3", only: [:dev], runtime: false} - ] - end - - defp umbrella_dep(:prod), do: "~> 0.2" - defp umbrella_dep(_), do: [in_umbrella: true] - - defp package do - %{ - name: "supabase_postgrest", - licenses: ["MIT"], - contributors: ["zoedsoupe"], - links: %{ - "GitHub" => @source_url, - "Docs" => "https://hexdocs.pm/supabase_postgrest" - }, - files: ~w[lib mix.exs README.md LICENSE] - } - end - - defp docs 
do - [ - main: "Supabase.PostgREST", - extras: ["README.md"] - ] - end - - defp description do - """ - High level Elixir client for Supabase PostgREST and Ecto Adapter. - """ - end -end diff --git a/apps/supabase_potion/.formatter.exs b/apps/supabase_potion/.formatter.exs deleted file mode 100644 index 90a0853..0000000 --- a/apps/supabase_potion/.formatter.exs +++ /dev/null @@ -1,5 +0,0 @@ -# Used by "mix format" -[ - inputs: ["mix.exs", "config/*.exs"], - subdirectories: ["apps/*"] -] diff --git a/apps/supabase_potion/.gitignore b/apps/supabase_potion/.gitignore deleted file mode 100644 index ea0e15f..0000000 --- a/apps/supabase_potion/.gitignore +++ /dev/null @@ -1,40 +0,0 @@ -# The directory Mix will write compiled artifacts to. -/_build/ - -# If you run "mix test --cover", coverage assets end up here. -/cover/ - -# The directory Mix downloads your dependencies sources to. -/deps/ - -# Where third-party dependencies like ExDoc output generated docs. -/doc/ - -# Ignore .fetch files in case you like to edit your project deps locally. -/.fetch - -# If the VM crashes, it generates a dump, let's ignore it too. -erl_crash.dump - -# Also ignore archive artifacts (built via "mix archive.build"). -*.ez - -# Temporary files, for example, from tests. -/tmp/ - -# Nix files -/.nix-mix/ -/.postgres/ -result - -# Secrets files -.env - -# LSP files -/.lexical/ - -# Nix files -result - -/.elixir_ls/ -/.elixir-tools/ diff --git a/apps/supabase_potion/LICENSE b/apps/supabase_potion/LICENSE deleted file mode 100644 index d448fa4..0000000 --- a/apps/supabase_potion/LICENSE +++ /dev/null @@ -1,13 +0,0 @@ - DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - Version 2, December 2004 - - Copyright (C) 2023 Zoey Pessanha - - Everyone is permitted to copy and distribute verbatim or modified - copies of this license document, and changing it is allowed as long - as the name is changed. - - DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - - 0. You just DO WHAT THE FUCK YOU WANT TO. diff --git a/apps/supabase_potion/README.md b/apps/supabase_potion/README.md deleted file mode 100644 index 33d405b..0000000 --- a/apps/supabase_potion/README.md +++ /dev/null @@ -1,96 +0,0 @@ -# Supabase Potion - -Where the magic starts! - -## Getting Started - -### Installation - -To install the base SDK: - -```elixir -def deps do - [ - {:supabase_potion, "~> 0.3"} - ] -end -``` - -### Clients - -A `Supabase.Client` is an Agent that holds general information about Supabase, that can be used to intereact with any of the children integrations, for example: `Supabase.Storage` or `Supabase.UI`. - -`Supabase.Client` is defined as: - -- `:name` - the name of the client, started by `start_link/1` -- `:conn` - connection information, the only required option as it is vital to the `Supabase.Client`. - - `:base_url` - The base url of the Supabase API, it is usually in the form `https://.supabase.io`. - - `:api_key` - The API key used to authenticate requests to the Supabase API. - - `:access_token` - Token with specific permissions to access the Supabase API, it is usually the same as the API key. 
-- `:db` - default database options - - `:schema` - default schema to use, defaults to `"public"` -- `:global` - global options config - - `:headers` - additional headers to use on each request -- `:auth` - authentication options - - `:auto_refresh_token` - automatically refresh the token when it expires, defaults to `true` - - `:debug` - enable debug mode, defaults to `false` - - `:detect_session_in_url` - detect session in URL, defaults to `true` - - `:flow_type` - authentication flow type, defaults to `"web"` - - `:persist_session` - persist session, defaults to `true` - - `:storage` - storage type - - `:storage_key` - storage key - -## Usage - -The Supabase Elixir SDK provides a flexible way to manage `Supabase.Client` instances, which can, in turn, manage multiple `Supabase.Client` instances. Here's a brief overview of the key concepts: - -### Starting a Client - -You can start a client using the `Supabase.Client.start_link/1` function. However, it's recommended to use `Supabase.init_client!/1`, which allows you to pass client options and automatically manage `Supabase.Client` processes. - -```elixir -iex> Supabase.Client.init_client!(%{conn: %{base_url: "", api_key: ""}}) -{:ok, #PID<0.123.0>} -``` - -## Configuration - -Ensure your Supabase configurations are set: - -```elixir -import Config - -config :supabase, - manage_clients: false, - supabase_base_url: System.fetch_env!("SUPABASE_BASE_URL"), - supabase_api_key: System.fetch_env!("SUPABASE_API_KEY"), -``` - -- `supabase_base_url`: The base URL of your Supabase project! More information on how to find it can be seen on the [next section](#how-to-find-my-supabase-base-url?) -- `supabase_api_key`: The secret of your Supabase project! More information on how to find it can be seen on the [next section](#how-to-find-my-supabase-api-key?) -- `manage_clients`: Enable or disable the internal client management by the library. Defauls to `true`, see more on the [library supervisor documentation](https://hexdocs.pm/supabase_potion/Supabase.ClientSupervisor.html). - -Make sure to set the environment variables `SUPABASE_BASE_URL` and `SUPABASE_API_KEY`. - -### How to find my Supabase base URL? - -You can find your Supabase base URL in the Settings page of your project. -Firstly select your project from the initial Dashboard. -On the left sidebar, click on the Settings icon, then select API. -The base URL is the first field on the page. - -### How to find my Supabase API Key? - -You can find your Supabase API key in the Settings page of your project. -Firstly select your project from the initial Dashboard. -On the left sidebar, click on the Settings icon, then select API. -The API key is the second field on the page. - -There two types of API keys, the public and the private. The last one -bypass any Row Level Security (RLS) rules you have set up. -So you shouldn't use it in your frontend application. - -If you don't know what RLS is, you can read more about it here: -https://supabase.com/docs/guides/auth/row-level-security - -For most cases you should prefer to use the public "anon" Key. 
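Editor's note (not part of the diff): the `apps/supabase_potion/README.md` removed above tells the reader to export `SUPABASE_BASE_URL` and `SUPABASE_API_KEY` and then start a client, and the storage README further down passes that client's name to `Supabase.Storage`. The sketch below only stitches those documented steps together for orientation; the client name `MyClient` is an arbitrary example, and the exact call shapes are taken from the examples in the removed READMEs rather than verified against the final API.

```elixir
# Hedged sketch: bootstrap a named client from the env vars the removed
# README documents, then refer to it by name from an integration.
base_url = System.fetch_env!("SUPABASE_BASE_URL")
api_key = System.fetch_env!("SUPABASE_API_KEY")

# `name:` and the `conn:` map follow the README examples above;
# the return shape {:ok, pid} is the one shown in those examples.
{:ok, _pid} =
  Supabase.Client.init_client!(%{
    name: MyClient,
    conn: %{base_url: base_url, api_key: api_key}
  })

# The registered name can then be handed to the storage API, e.g.:
# Supabase.Storage.list_buckets(MyClient)
```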
diff --git a/apps/supabase_potion/mix.exs b/apps/supabase_potion/mix.exs deleted file mode 100644 index 7b9b685..0000000 --- a/apps/supabase_potion/mix.exs +++ /dev/null @@ -1,65 +0,0 @@ -defmodule Supabase.Potion.MixProject do - use Mix.Project - - @version "0.3.0" - @source_url "https://github.com/zoedsoupe/supabase" - - def project do - [ - app: :supabase_potion, - version: @version, - build_path: "../../_build", - deps_path: "../../deps", - lockfile: "../../mix.lock", - elixir: "~> 1.14", - start_permanent: Mix.env() == :prod, - deps: deps(), - docs: docs(), - package: package(), - description: description() - ] - end - - def application do - [ - mod: {Supabase.Application, []}, - extra_applications: [:logger] - ] - end - - defp deps do - [ - {:finch, "~> 0.16"}, - {:jason, "~> 1.4"}, - {:ecto, "~> 3.10"}, - {:ex_doc, ">= 0.0.0", runtime: false, only: [:dev, :prod]}, - {:credo, "~> 1.7", only: [:dev, :test], runtime: false}, - {:dialyxir, "~> 1.3", only: [:dev], runtime: false} - ] - end - - defp package do - %{ - licenses: ["MIT"], - contributors: ["zoedsoupe"], - links: %{ - "GitHub" => @source_url, - "Docs" => "https://hexdocs.pm/supabase_potion" - }, - files: ~w[lib mix.exs README.md LICENSE] - } - end - - defp docs do - [ - main: "Supabase", - extras: ["README.md"] - ] - end - - defp description do - """ - Complete Elixir client for Supabase. - """ - end -end diff --git a/apps/supabase_potion/test/test_helper.exs b/apps/supabase_potion/test/test_helper.exs deleted file mode 100644 index 869559e..0000000 --- a/apps/supabase_potion/test/test_helper.exs +++ /dev/null @@ -1 +0,0 @@ -ExUnit.start() diff --git a/apps/supabase_storage/.formatter.exs b/apps/supabase_storage/.formatter.exs deleted file mode 100644 index d2cda26..0000000 --- a/apps/supabase_storage/.formatter.exs +++ /dev/null @@ -1,4 +0,0 @@ -# Used by "mix format" -[ - inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"] -] diff --git a/apps/supabase_storage/.gitignore b/apps/supabase_storage/.gitignore deleted file mode 100644 index ea0e15f..0000000 --- a/apps/supabase_storage/.gitignore +++ /dev/null @@ -1,40 +0,0 @@ -# The directory Mix will write compiled artifacts to. -/_build/ - -# If you run "mix test --cover", coverage assets end up here. -/cover/ - -# The directory Mix downloads your dependencies sources to. -/deps/ - -# Where third-party dependencies like ExDoc output generated docs. -/doc/ - -# Ignore .fetch files in case you like to edit your project deps locally. -/.fetch - -# If the VM crashes, it generates a dump, let's ignore it too. -erl_crash.dump - -# Also ignore archive artifacts (built via "mix archive.build"). -*.ez - -# Temporary files, for example, from tests. -/tmp/ - -# Nix files -/.nix-mix/ -/.postgres/ -result - -# Secrets files -.env - -# LSP files -/.lexical/ - -# Nix files -result - -/.elixir_ls/ -/.elixir-tools/ diff --git a/apps/supabase_storage/README.md b/apps/supabase_storage/README.md deleted file mode 100644 index 2711593..0000000 --- a/apps/supabase_storage/README.md +++ /dev/null @@ -1,29 +0,0 @@ -# Supabase Storage - -[Storage]() implementation for the `supabase_potion` SDK in Elixir. 
- -## Installation - -```elixir -def deps do - [ - {:supabase_potion, "~> 0.2"}, - {:supabase_storage, "~> 0.2"} - ] -end -``` - -## Usage - -Firstly you need to initialize your Supabase client(s) as can be found on the [supabase_potion documentation](): - -```elixir -iex> Supabase.init_client(%{name: Conn, conn: %{base_url: "", api_key: ""}}) -{:ok, #PID<>} -``` - -Now you can pass the Client to the `Supabase.Storage` functions as a `PID` or the name that was registered on the client initialization: - -```elixir -iex> Supabase.Storage.list_buckets(pid | client_name) -``` diff --git a/apps/supabase_storage/lib/supabase/storage.ex b/apps/supabase_storage/lib/supabase/storage.ex deleted file mode 100644 index 4f53ebc..0000000 --- a/apps/supabase_storage/lib/supabase/storage.ex +++ /dev/null @@ -1,636 +0,0 @@ -defmodule Supabase.Storage do - @moduledoc """ - Supabase.Storage Elixir Package - - This module provides integration with the Supabase Storage API, enabling developers - to perform a multitude of operations related to buckets and objects with ease. - - ## Features - - 1. **Bucket Operations**: Methods that allow the creation, listing, and removal of buckets. - 2. **Object Operations**: Functions designed to upload, download, retrieve object information, - and perform move, copy, and remove actions on objects. - - ## Usage - - You can start by creating or managing buckets: - - Supabase.Storage.create_bucket(client, "my_new_bucket") - - Once a bucket is set up, objects within the bucket can be managed: - - Supabase.Storage.upload_object(client, "my_bucket", "path/on/server.png", "path/on/local.png") - - ## Examples - - Here are some basic examples: - - # Removing an object - Supabase.Storage.remove_object(client, "my_bucket", "path/on/server.png") - - # Moving an object - Supabase.Storage.move_object(client, "my_bucket", "path/on/server1.png", "path/on/server2.png") - - Ensure to refer to method-specific documentation for detailed examples and explanations. - - ## Permissions - - Do remember to check and set the appropriate permissions in Supabase to make sure that the - operations can be performed without any hitches. - """ - - import Supabase.Client, only: [is_client: 1] - - alias Supabase.Client - alias Supabase.Client.Conn - alias Supabase.Storage.Bucket - alias Supabase.Storage.BucketHandler - alias Supabase.Storage.Object - alias Supabase.Storage.ObjectHandler - alias Supabase.Storage.ObjectOptions - alias Supabase.Storage.SearchOptions - - @behaviour Supabase.StorageBehaviour - - @doc """ - Retrieves information about all buckets in the current project. - - ## Notes - - * Policy permissions required - * `buckets` permissions: `select` - * `objects` permissions: none - - ## Examples - - iex> Supabase.Storage.list_buckets(client) - {:ok, [%Supabase.Storage.Bucket{...}, ...]} - - iex> Supabase.Storage.list_buckets(invalid_conn) - {:error, reason} - - """ - @impl true - def list_buckets(client) when is_client(client) do - case Client.retrieve_connection(client) do - nil -> - {:error, :invalid_client} - - %Conn{access_token: token, api_key: api_key, base_url: base_url} -> - {:ok, BucketHandler.list(base_url, api_key, token)} - end - end - - @doc """ - Retrieves information about a bucket in the current project. 
- - ## Notes - - * Policy permissions required - * `buckets` permissions: `select` - * `objects` permissions: none - - ## Examples - - iex> Supabase.Storage.retrieve_bucket_info(client, "avatars") - {:ok, %Supabase.Storage.Bucket{...}} - - iex> Supabase.Storage.retrieve_bucket_info(invalid_conn, "avatars") - {:error, reason} - - """ - @impl true - def retrieve_bucket_info(client, id) when is_client(client) do - case Client.retrieve_connection(client) do - nil -> - {:error, :invalid_client} - - %Conn{access_token: token, api_key: api_key, base_url: base_url} -> - BucketHandler.retrieve_info(base_url, api_key, token, id) - end - end - - @doc """ - Creates a new bucket in the current project given a map of attributes. - - ## Attributes - - * `id`: the id of the bucket to be created, required - * `name`: the name of the bucket to be created, defaults to the `id` provided - * `file_size_limit`: the maximum size of a file in bytes - * `allowed_mime_types`: a list of allowed mime types, defaults to allow all MIME types - * `public`: whether the bucket is public or not, defaults to `false` - - ## Notes - - * Policy permissions required - * `buckets` permissions: `insert` - * `objects` permissions: none - - ## Examples - - iex> Supabase.Storage.create_bucket(client, %{id: "avatars"}) - {:ok, %Supabase.Storage.Bucket{...}} - - iex> Supabase.Storage.create_bucket(invalid_conn, %{id: "avatars"}) - {:error, reason} - - """ - @impl true - def create_bucket(client, attrs) when is_client(client) do - with {:ok, bucket_params} <- Bucket.create_changeset(attrs), - %Conn{access_token: token, api_key: api_key, base_url: base_url} <- - Client.retrieve_connection(client), - {:ok, _} <- BucketHandler.create(base_url, api_key, token, bucket_params) do - retrieve_bucket_info(client, bucket_params.id) - else - nil -> - {:error, :invalid_client} - - {:error, changeset} -> - {:error, changeset} - end - end - - @doc """ - Updates a bucket in the current project given a map of attributes. - - ## Attributes - - * `file_size_limit`: the maximum size of a file in bytes - * `allowed_mime_types`: a list of allowed mime types, defaults to allow all MIME types - * `public`: whether the bucket is public or not, defaults to `false` - - Isn't possible to update a bucket's `id` or `name`. If you want or need this, you should - firstly delete the bucket and then create a new one. - - ## Notes - - * Policy permissions required - * `buckets` permissions: `update` - * `objects` permissions: none - - ## Examples - - iex> Supabase.Storage.update_bucket(client, bucket, %{public: true}) - {:ok, %Supabase.Storage.Bucket{...}} - - iex> Supabase.Storage.update_bucket(invalid_conn, bucket, %{public: true}) - {:error, reason} - - """ - @impl true - def update_bucket(client, bucket, attrs) when is_client(client) do - with {:ok, bucket_params} <- Bucket.update_changeset(bucket, attrs), - %Conn{access_token: token, api_key: api_key, base_url: base_url} <- - Client.retrieve_connection(client), - {:ok, _} <- BucketHandler.update(base_url, api_key, token, bucket.id, bucket_params) do - retrieve_bucket_info(client, bucket.id) - else - nil -> - {:error, :invalid_client} - - {:error, changeset} -> - {:error, changeset} - end - end - - @doc """ - Empties a bucket in the current project. This action deletes all objects in the bucket. 
- - ## Notes - - * Policy permissions required - * `buckets` permissions: `update` - * `objects` permissions: `delete` - - ## Examples - - iex> Supabase.Storage.empty_bucket(client, bucket) - {:ok, :emptied} - - iex> Supabase.Storage.empty_bucket(invalid_conn, bucket) - {:error, reason} - - """ - @impl true - def empty_bucket(client, %Bucket{} = bucket) when is_client(client) do - case Client.retrieve_connection(client) do - nil -> - {:error, :invalid_client} - - %Conn{access_token: token, api_key: api_key, base_url: base_url} -> - BucketHandler.empty(base_url, api_key, token, bucket.id) - end - end - - @doc """ - Deletes a bucket in the current project. Notice that this also deletes all objects in the bucket. - - ## Notes - - * Policy permissions required - * `buckets` permissions: `delete` - * `objects` permissions: `delete` - - ## Examples - - iex> Supabase.Storage.delete_bucket(client, bucket) - {:ok, :deleted} - - iex> Supabase.Storage.delete_bucket(invalid_conn, bucket) - {:error, reason} - - """ - @impl true - def delete_bucket(client, %Bucket{} = bucket) when is_client(client) do - with %Conn{access_token: token, api_key: api_key, base_url: base_url} <- - Client.retrieve_connection(client), - {:ok, _} <- BucketHandler.delete(base_url, api_key, token, bucket.id) do - {:ok, :deleted} - else - nil -> - {:error, :invalid_client} - - {:error, changeset} -> - {:error, changeset} - end - end - - @doc """ - Removes an object from a bucket in the current project. - - ## Notes - - * Policy permissions required - * `buckets` permissions: none - * `objects` permissions: `delete` - - ## Examples - - iex> Supabase.Storage.remove_object(client, bucket, object) - {:ok, :deleted} - - iex> Supabase.Storage.remove_object(invalid_conn, bucket, object) - {:error, reason} - - """ - @impl true - def remove_object(client, %Bucket{} = bucket, %Object{} = object) when is_client(client) do - case Client.retrieve_connection(client) do - nil -> - {:error, :invalid_client} - - %Conn{access_token: token, api_key: api_key, base_url: base_url} -> - ObjectHandler.remove(base_url, api_key, token, bucket.name, object.path) - end - end - - @doc """ - Moves a object from a bucket and send it to another bucket, in the current project. - Notice that isn't necessary to pass the current bucket, because the object already - contains this information. - - ## Notes - - * Policy permissions required - * `buckets` permissions: none - * `objects` permissions: `delete` and `create` - - ## Examples - - iex> Supabase.Storage.move_object(client, bucket, object) - {:ok, :moved} - - iex> Supabase.Storage.move_object(invalid_conn, bucket, object) - {:error, reason} - - """ - @impl true - def move_object(client, %Bucket{} = bucket, %Object{} = object, to) when is_client(client) do - case Client.retrieve_connection(client) do - nil -> - {:error, :invalid_client} - - %Conn{access_token: token, api_key: api_key, base_url: base_url} -> - ObjectHandler.move(base_url, api_key, token, bucket.name, object.path, to) - end - end - - @doc """ - Copies a object from a bucket and send it to another bucket, in the current project. - Notice that isn't necessary to pass the current bucket, because the object already - contains this information. 
- - ## Notes - - * Policy permissions required - * `buckets` permissions: none - * `objects` permissions: `create` - - ## Examples - - iex> Supabase.Storage.copy_object(client, bucket, object) - {:ok, :copied} - - iex> Supabase.Storage.copy_object(invalid_conn, bucket, object) - {:error, reason} - - """ - @impl true - def copy_object(client, %Bucket{} = bucket, %Object{} = object, to) when is_client(client) do - case Client.retrieve_connection(client) do - nil -> - {:error, :invalid_client} - - %Conn{access_token: token, api_key: api_key, base_url: base_url} -> - ObjectHandler.copy(base_url, api_key, token, bucket.name, object.path, to) - end - end - - @doc """ - Retrieves information about an object in a bucket in the current project. - - ## Notes - - * Policy permissions required - * `buckets` permissions: none - * `objects` permissions: `select` - - ## Examples - - iex> Supabase.Storage.retrieve_object_info(client, bucket, "some.png") - {:ok, %Supabase.Storage.Object{...}} - - iex> Supabase.Storage.retrieve_object_info(invalid_conn, bucket, "some.png") - {:error, reason} - - """ - @impl true - def retrieve_object_info(client, %Bucket{} = bucket, wildcard) when is_client(client) do - case Client.retrieve_connection(client) do - nil -> - {:error, :invalid_client} - - %Conn{access_token: token, api_key: api_key, base_url: base_url} -> - ObjectHandler.get_info(base_url, api_key, token, bucket.name, wildcard) - end - end - - @doc """ - Lists a set of objects in a bucket in the current project. - - ## Searching - - You can pass a prefix to filter the objects returned. For example, if you have the following - objects in your bucket: - - . - └── bucket/ - ├── avatars/ - │ └── some.png - ├── other.png - └── some.pdf - - And you want to list only the objects inside the `avatars` folder, you can do: - - iex> Supabase.Storage.list_objects(client, bucket, "avatars/") - {:ok, [%Supabase.Storage.Object{...}]} - - Also you can pass some search options as a `Supabase.Storage.SearchOptions` struct. Available - options are: - - * `limit`: the maximum number of objects to return - * `offset`: the number of objects to skip - * `sort_by`: - * `column`: the column to sort by, defaults to `created_at` - * `order`: the order to sort by, defaults to `desc` - - ## Notes - - * Policy permissions required - * `buckets` permissions: none - * `objects` permissions: `select` - - ## Examples - - iex> Supabase.Storage.list_objects(client, bucket) - {:ok, [%Supabase.Storage.Object{...}, ...]} - - iex> Supabase.Storage.list_objects(invalid_conn, bucket) - {:error, reason} - - """ - @impl true - def list_objects(client, %Bucket{} = bucket, prefix \\ "", opts \\ %SearchOptions{}) - when is_client(client) do - case Client.retrieve_connection(client) do - nil -> - {:error, :invalid_client} - - %Conn{access_token: token, api_key: api_key, base_url: base_url} -> - ObjectHandler.list(base_url, api_key, token, bucket.name, prefix, opts) - end - end - - @doc """ - Uploads a file to a bucket in the current project. Notice that you only need to - pass the path to the file you want to upload, as the file will be read in a stream way - to be sent to the server. - - ## Options - - You can pass some options as a `Supabase.Storage.ObjectOptions` struct. 
Available - options are: - - * `cache_control`: the cache control header value, defaults to `3600` - * `content_type`: the content type header value, defaults to `text/plain;charset=UTF-8` - * `upsert`: whether to overwrite the object if it already exists, defaults to `false` - - ## Notes - - * Policy permissions required - * `buckets` permissions: none - * `objects` permissions: `insert` - - ## Examples - - iex> Supabase.Storage.upload_object(client, bucket, "avatars/some.png", "path/to/file.png") - {:ok, %Supabase.Storage.Object{...}} - - iex> Supabase.Storage.upload_object(invalid_conn, bucket, "avatars/some.png", "path/to/file.png") - {:error, reason} - - """ - @impl true - def upload_object(client, %Bucket{} = bucket, path, file, opts \\ %ObjectOptions{}) - when is_client(client) do - case Client.retrieve_connection(client) do - nil -> - {:error, :invalid_client} - - %Conn{access_token: token, api_key: api_key, base_url: base_url} -> - file = Path.expand(file) - ObjectHandler.create_file(base_url, api_key, token, bucket.name, path, file, opts) - end - end - - @doc """ - Downloads an object from a bucket in the current project. That return a binary that - represents the object content. - - ## Notes - - * Policy permissions required - * `buckets` permissions: none - * `objects` permissions: `select` - - ## Examples - - iex> Supabase.Storage.download_object(client, %Bucket{}, "avatars/some.png") - {:ok, <<>>} - - iex> Supabase.Storage.download_object(invalid_conn, %Bucket{}, "avatars/some.png") - {:error, reason} - - """ - @impl true - def download_object(client, %Bucket{} = bucket, wildcard) when is_client(client) do - case Client.retrieve_connection(client) do - nil -> - {:error, :invalid_client} - - %Conn{access_token: token, api_key: api_key, base_url: base_url} -> - ObjectHandler.get(base_url, api_key, token, bucket.name, wildcard) - end - end - - @doc """ - Downloads an object from a bucket in the current project. That return a stream that - represents the object content. Notice that the request to the server is only made - when you start to consume the stream. - - ## Notes - - * Policy permissions required - * `buckets` permissions: none - * `objects` permissions: `select` - - ## Examples - - iex> Supabase.Storage.download_object_lazy(client, %Bucket{}, "avatars/some.png") - {:ok, #Function<59.128620087/2 in Stream.resource/3>} - - iex> Supabase.Storage.download_object_lazy(invalid_conn, %Bucket{}, "avatars/some.png") - {:error, reason} - - """ - @impl true - def download_object_lazy(client, %Bucket{} = bucket, wildcard) when is_client(client) do - case Client.retrieve_connection(client) do - nil -> - {:error, :invalid_client} - - %Conn{access_token: token, api_key: api_key, base_url: base_url} -> - ObjectHandler.get_lazy(base_url, api_key, token, bucket.name, wildcard) - end - end - - @doc """ - Saves an object from a bucket in the current project to a file in the local filesystem. 
- - ## Notes - - * Policy permissions required - * `buckets` permissions: none - * `objects` permissions: `select` - - ## Examples - - iex> Supabase.Storage.save_object(client, "./some.png", %Bucket{}, "avatars/some.png") - :ok - - iex> Supabase.Storage.save_object(client, "./some.png", %Bucket{}, "do_not_exist.png") - {:error, reason} - - """ - @impl true - def save_object(client, path, %Bucket{} = bucket, wildcard) when is_client(client) do - with {:ok, bin} <- download_object(client, bucket, wildcard) do - File.write(Path.expand(path), bin) - end - end - - @doc """ - Saves an object from a bucket in the current project to a file in the local filesystem. - Notice that the request to the server is only made when you start to consume the stream. - - ## Notes - - * Policy permissions required - * `buckets` permissions: none - * `objects` permissions: `select` - - ## Examples - - iex> Supabase.Storage.save_object_stream(client, "./some.png", %Bucket{}, "avatars/some.png") - :ok - - iex> Supabase.Storage.save_object_stream(client, "./some.png", %Bucket{}, "do_not_exist.png") - {:error, reason} - - """ - @impl true - def save_object_stream(client, path, %Bucket{} = bucket, wildcard) when is_client(client) do - with {:ok, stream} <- download_object_lazy(client, bucket, wildcard) do - fs = File.stream!(Path.expand(path)) - - stream - |> Stream.into(fs) - |> Stream.run() - end - end - - @doc """ - Creates a signed URL for an object in a bucket in the current project. This URL can - be used to perform an HTTP request to the object, without the need of authentication. - Usually this is used to allow users to download objects from a bucket. - - ## Notes - - * Policy permissions required - * `buckets` permissions: none - * `objects` permissions: `select` - - ## Examples - - iex> Supabase.Storage.create_signed_url(client, bucket, "avatars/some.png", 3600) - {:ok, "https://.supabase.co"/object/sign//?token=} - - iex> Supabase.Storage.create_signed_url(invalid_client, bucket, "avatars/some.png", 3600) - {:error, :invalid_client} - - """ - @impl true - def create_signed_url(client, %Bucket{} = bucket, path, expires_in) when is_client(client) do - with %Conn{access_token: token, api_key: api_key, base_url: base_url} <- - Client.retrieve_connection(client), - {:ok, sign_url} <- - ObjectHandler.create_signed_url( - base_url, - api_key, - token, - bucket.name, - path, - expires_in - ) do - {:ok, URI.to_string(URI.merge(base_url, sign_url))} - else - nil -> - {:error, :invalid_client} - - err -> - err - end - end -end diff --git a/apps/supabase_storage/lib/supabase/storage/action_error.ex b/apps/supabase_storage/lib/supabase/storage/action_error.ex deleted file mode 100644 index 811f371..0000000 --- a/apps/supabase_storage/lib/supabase/storage/action_error.ex +++ /dev/null @@ -1,5 +0,0 @@ -defmodule Supabase.Storage.ActionError do - @moduledoc "Represents an Error on a Supabase Storage Action" - - defexception [:message] -end diff --git a/apps/supabase_storage/lib/supabase/storage/application.ex b/apps/supabase_storage/lib/supabase/storage/application.ex deleted file mode 100644 index 8b83f1e..0000000 --- a/apps/supabase_storage/lib/supabase/storage/application.ex +++ /dev/null @@ -1,34 +0,0 @@ -defmodule Supabase.Storage.Application do - @moduledoc false - - use Application - - @default_cache_size 100 - @default_buckets_reload_interval 60_000 - - @impl true - def start(_type, _args) do - children = [ - if(start_cache?(), do: {Storage.Cache, cache_max_size: cache_max_size()}), - if(start_cache?(), do: 
{Storage.CacheSupervisor, reload_interval: reload_interval()}) - ] - - opts = [strategy: :one_for_one, name: Supabase.Storage.Supervisor] - - children - |> Enum.reject(&is_nil/1) - |> Supervisor.start_link(opts) - end - - defp cache_max_size do - Application.get_env(:supabase, :storage)[:cache_max_size] || @default_cache_size - end - - defp start_cache? do - Application.get_env(:supabase, :storage)[:cache_buckets?] - end - - defp reload_interval do - Application.get_env(:supabase, :storage)[:reload_interval] || @default_buckets_reload_interval - end -end diff --git a/apps/supabase_storage/lib/supabase/storage/bucket.ex b/apps/supabase_storage/lib/supabase/storage/bucket.ex deleted file mode 100644 index c362b93..0000000 --- a/apps/supabase_storage/lib/supabase/storage/bucket.ex +++ /dev/null @@ -1,127 +0,0 @@ -defmodule Supabase.Storage.Bucket do - @moduledoc """ - Represents a Bucket on Supabase Storage. - - This module defines the structure and operations related to a storage bucket on Supabase. - - ## Structure - - A `Bucket` consists of: - - - `id`: The unique identifier for the bucket. - - `name`: The display name of the bucket. - - `owner`: The owner of the bucket. - - `file_size_limit`: The maximum file size allowed in the bucket (in bytes). Can be `nil` for no limit. - - `allowed_mime_types`: List of MIME types permitted in this bucket. Can be `nil` for no restrictions. - - `created_at`: Timestamp indicating when the bucket was created. - - `updated_at`: Timestamp indicating the last update to the bucket. - - `public`: Boolean flag determining if the bucket is publicly accessible or not. - - ## Functions - - - `parse!/1`: Parses and returns a bucket structure. - - `create_changeset/1`: Generates a changeset for creating a bucket. - - `update_changeset/2`: Generates a changeset for updating an existing bucket. - - ## Examples - - ### Parsing a bucket - - bucket_attrs = %{ - id: "bucket_id", - name: "My Bucket", - ... - } - Supabase.Storage.Bucket.parse!(bucket_attrs) - - ### Creating a bucket changeset - - new_bucket_attrs = %{ - id: "new_bucket_id", - ... - } - Supabase.Storage.Bucket.create_changeset(new_bucket_attrs) - - ### Updating a bucket - - existing_bucket = %Supabase.Storage.Bucket{ - id: "existing_bucket_id", - ... 
- } - updated_attrs = %{ - public: true - } - Supabase.Storage.Bucket.update_changeset(existing_bucket, updated_attrs) - """ - - use Ecto.Schema - - import Ecto.Changeset - - @type t :: %__MODULE__{ - id: String.t(), - name: String.t(), - owner: String.t(), - file_size_limit: integer | nil, - allowed_mime_types: list(String.t()) | nil, - created_at: NaiveDateTime.t(), - updated_at: NaiveDateTime.t(), - public: boolean - } - - @fields ~w(id name created_at updated_at file_size_limit allowed_mime_types public owner)a - @create_fields ~w(id name file_size_limit allowed_mime_types public)a - @update_fields ~w(file_size_limit allowed_mime_types public)a - - @primary_key false - embedded_schema do - field(:id, :string) - field(:name, :string) - field(:owner, :string) - field(:file_size_limit, :integer) - field(:allowed_mime_types, {:array, :string}) - field(:created_at, :naive_datetime) - field(:updated_at, :naive_datetime) - field(:public, :boolean, default: false) - end - - @spec parse!(map) :: t - def parse!(attrs) do - %__MODULE__{} - |> cast(attrs, @fields) - |> apply_action!(:parse) - end - - @spec create_changeset(map) :: {:ok, map} | {:error, Ecto.Changeset.t()} - def create_changeset(attrs) do - %__MODULE__{} - |> cast(attrs, @create_fields) - |> validate_required([:id]) - |> maybe_put_name() - |> apply_action(:create) - |> case do - {:ok, data} -> {:ok, Map.take(data, @create_fields)} - err -> err - end - end - - defp maybe_put_name(changeset) do - if get_change(changeset, :name) do - changeset - else - id = get_change(changeset, :id) - put_change(changeset, :name, id) - end - end - - @spec update_changeset(t, map) :: {:ok, map} | {:error, Ecto.Changeset.t()} - def update_changeset(%__MODULE__{} = bucket, attrs) do - bucket - |> cast(attrs, @update_fields) - |> apply_action(:update) - |> case do - {:ok, data} -> {:ok, Map.take(data, @update_fields)} - err -> err - end - end -end diff --git a/apps/supabase_storage/lib/supabase/storage/cache.ex b/apps/supabase_storage/lib/supabase/storage/cache.ex deleted file mode 100644 index 6152a7d..0000000 --- a/apps/supabase_storage/lib/supabase/storage/cache.ex +++ /dev/null @@ -1,97 +0,0 @@ -defmodule Supabase.Storage.Cache do - @moduledoc """ - Provides caching mechanisms for Supabase Storage Buckets. - - This module acts as a GenServer that offers caching capabilities, especially for bucket-related operations in Supabase Storage. The caching is backed by the `:ets` (Erlang Term Storage) to provide in-memory storage and fast retrieval of cached data. - - ## Features - - - **Bucket Caching**: Store and retrieve buckets by their unique identifier. - - **Cache Flushing**: Clear the cache when necessary. - - **Configurable Cache Size**: Limit the number of items that can be stored in the cache. - - ## Usage - - ### Starting the Cache Server - - Supabase.Storage.Cache.start_link(%{cache_max_size: 200}) - - ### Caching Buckets - - buckets = [%{id: "bucket_1", ...}, %{id: "bucket_2", ...}] - Supabase.Storage.Cache.cache_buckets(buckets) - - ### Retrieving a Cached Bucket by ID - - Supabase.Storage.Cache.find_bucket_by_id("bucket_1") - - ### Clearing the Cache - - Supabase.Storage.Cache.flush() - - ## Implementation Details - - The cache uses the `:ets` module for in-memory storage of buckets. The number of buckets cached is controlled by the `:cache_max_size` option (default: 100). When the cache is close to exceeding its maximum size, older entries are removed to accommodate new ones. 
- """ - - use GenServer - - ## Client - - def start_link(args) do - GenServer.start_link(__MODULE__, args, name: __MODULE__) - end - - def find_bucket_by_id(id) do - GenServer.call(__MODULE__, {:find_bucket, id: id}) - end - - def cache_buckets(buckets) do - GenServer.cast(__MODULE__, {:cache_buckets, buckets}) - end - - def flush do - GenServer.cast(__MODULE__, :flush) - end - - ## API - - @impl true - def init(args) do - Process.flag(:trap_exit, true) - table = :ets.new(:buckets_cache, [:set, :public, :named_table]) - max_size = Keyword.get(args, :cache_max_size, 100) - {:ok, %{table: table, max_size: max_size, size: 0}} - end - - @impl true - def handle_cast(:flush, table) do - :ets.delete_all_objects(table) - {:noreply, table} - end - - def handle_cast({:cache_buckets, buckets}, state) do - if overflowed_max_size?(state, buckets) do - :ets.delete_all_objects(state.table) - end - - # prefer atomic operations - for bucket <- buckets do - :ets.insert_new(state.table, {bucket.id, bucket}) - end - - {:noreply, %{state | size: length(buckets)}} - end - - defp overflowed_max_size?(state, buckets) do - state.size + length(buckets) > state.max_size - end - - @impl true - def handle_call({:find_bucket, id: id}, _from, state) do - bucket = :ets.lookup_element(state.table, id, 2) - {:reply, bucket, state} - rescue - _ -> {:reply, nil, state} - end -end diff --git a/apps/supabase_storage/lib/supabase/storage/cache_reloader.ex b/apps/supabase_storage/lib/supabase/storage/cache_reloader.ex deleted file mode 100644 index 137ade1..0000000 --- a/apps/supabase_storage/lib/supabase/storage/cache_reloader.ex +++ /dev/null @@ -1,51 +0,0 @@ -defmodule Supabase.Storage.CacheReloader do - @moduledoc """ - Periodically reloads and updates the bucket cache for Supabase Storage. - - This module acts as a GenServer that schedules periodic tasks to reload and update the cache for Supabase Storage Buckets. It collaborates with the `Supabase.Storage.Cache` to ensure that the cached data remains fresh and updated. - - ## Features - - - **Automatic Cache Reloading**: Periodically reloads the buckets from Supabase Storage and updates the cache. - - **Configurable Reload Interval**: The time interval between successive cache reloads can be specified. - - ## Usage - - ### Starting the CacheReloader Server - - Supabase.Storage.CacheReloader.start_link(%{reload_interval: 2_000}) - - ## Implementation Details - - By default, the reload interval is set to 1 second (`@ttl`). This means the cache will be updated every second with the latest data from Supabase Storage. This interval can be configured during the server start using the `:reload_interval` option. - - The server interacts with `Supabase.Storage.list_buckets/1` to fetch the list of buckets and then updates the cache using `Supabase.Storage.Cache.cache_buckets/1`. 
- """ - - use GenServer - - alias Supabase.Storage.Cache - - # @ttl 60_000 - @ttl 1_000 - - def start_link(args) do - GenServer.start_link(__MODULE__, args, name: __MODULE__) - end - - @impl true - def init(args) do - Process.flag(:trap_exit, true) - interval = Keyword.get(args, :reload_interval, @ttl) - Process.send_after(self(), :reload, interval) - {:ok, interval} - end - - @impl true - def handle_info(:reload, interval) do - {:ok, buckets} = Supabase.Storage.list_buckets(Supabase.Connection) - :ok = Cache.cache_buckets(buckets) - Process.send_after(self(), :reload, interval) - {:noreply, interval} - end -end diff --git a/apps/supabase_storage/lib/supabase/storage/endpoints.ex b/apps/supabase_storage/lib/supabase/storage/endpoints.ex deleted file mode 100644 index f3271bf..0000000 --- a/apps/supabase_storage/lib/supabase/storage/endpoints.ex +++ /dev/null @@ -1,51 +0,0 @@ -defmodule Supabase.Storage.Endpoints do - @moduledoc "Defines the Endpoints for the Supabase Storage API" - - def bucket_path do - "/storage/v1/bucket" - end - - def bucket_path_with_id(id) do - "/storage/v1/bucket/#{id}" - end - - def bucket_path_to_empty(id) do - bucket_path_with_id(id) <> "/empty" - end - - def file_upload_url(path) do - "/storage/v1/object/upload/sign/#{path}" - end - - def file_move do - "/storage/v1/object/move" - end - - def file_copy do - "/storage/v1/object/copy" - end - - def file_upload(bucket, path) do - "/storage/v1/object/#{bucket}/#{path}" - end - - def file_info(bucket, wildcard) do - "/storage/v1/object/info/authenticated/#{bucket}/#{wildcard}" - end - - def file_list(bucket) do - "/storage/v1/object/list/#{bucket}" - end - - def file_remove(bucket) do - "/storage/v1/object/#{bucket}" - end - - def file_signed_url(bucket, path) do - "/storage/v1/object/sign/#{bucket}/#{path}" - end - - def file_download(bucket, wildcard) do - "/storage/v1/object/authenticated/#{bucket}/#{wildcard}" - end -end diff --git a/apps/supabase_storage/lib/supabase/storage/handlers/bucket_handler.ex b/apps/supabase_storage/lib/supabase/storage/handlers/bucket_handler.ex deleted file mode 100644 index 02dbe0e..0000000 --- a/apps/supabase_storage/lib/supabase/storage/handlers/bucket_handler.ex +++ /dev/null @@ -1,133 +0,0 @@ -defmodule Supabase.Storage.BucketHandler do - @moduledoc """ - Provides low-level API functions for managing Supabase Storage buckets. - - The `BucketHandler` module offers a collection of functions that directly interact with the Supabase Storage API for managing buckets. This module works closely with the `Supabase.Fetcher` for sending HTTP requests and the `Supabase.Storage.Cache` for caching bucket information. - - ## Features - - - **Bucket Listing**: Fetch a list of all the buckets available in the storage. - - **Bucket Retrieval**: Retrieve detailed information about a specific bucket. - - **Bucket Creation**: Create a new bucket with specified attributes. - - **Bucket Update**: Modify the attributes of an existing bucket. - - **Bucket Emptying**: Empty the contents of a bucket without deleting the bucket itself. - - **Bucket Deletion**: Permanently remove a bucket and its contents. - - ## Caution - - This module provides a low-level interface to Supabase Storage buckets and is designed for internal use by the `Supabase.Storage` module. Direct use is discouraged unless you need to perform custom or unsupported actions that are not available through the higher-level API. Incorrect use can lead to unexpected results or data loss. 
- - ## Implementation Details - - All functions within the module expect a base URL, API key, and access token as their initial arguments, followed by any additional arguments required for the specific operation. Responses are usually in the form of `{:ok, result}` or `{:error, message}` tuples. - """ - - alias Supabase.Connection, as: Conn - alias Supabase.Fetcher - alias Supabase.Storage.Bucket - alias Supabase.Storage.Cache - alias Supabase.Storage.Endpoints - - @type bucket_id :: String.t() - @type bucket_name :: String.t() - @type create_attrs :: %{ - id: String.t(), - name: String.t(), - file_size_limit: integer | nil, - allowed_mime_types: list(String.t()) | nil, - public: boolean - } - @type update_attrs :: %{ - public: boolean | nil, - file_size_limit: integer | nil, - allowed_mime_types: list(String.t()) | nil - } - - @spec list(Conn.base_url(), Conn.api_key(), Conn.access_token()) :: - {:ok, [Bucket.t()]} | {:error, String.t()} - def list(base_url, api_key, token) do - url = Fetcher.get_full_url(base_url, Endpoints.bucket_path()) - headers = Fetcher.apply_headers(api_key, token) - - url - |> Fetcher.get(headers) - |> case do - {:ok, body} -> {:ok, Enum.map(body, &Bucket.parse!/1)} - {:error, msg} -> {:error, msg} - end - end - - @spec retrieve_info(Conn.base_url(), Conn.api_key(), Conn.access_token(), bucket_id) :: - {:ok, Bucket.t()} | {:error, String.t()} - def retrieve_info(base_url, api_key, token, bucket_id) do - if bucket = Cache.find_bucket_by_id(bucket_id) do - {:ok, bucket} - else - url = Fetcher.get_full_url(base_url, Endpoints.bucket_path_with_id(bucket_id)) - headers = Fetcher.apply_headers(api_key, token) - - url - |> Fetcher.get(headers) - |> case do - {:ok, body} -> {:ok, Bucket.parse!(body)} - {:error, msg} -> {:error, msg} - end - end - end - - @spec create(Conn.base_url(), Conn.api_key(), Conn.access_token(), create_attrs) :: - {:ok, Bucket.t()} | {:error, String.t()} - def create(base_url, api_key, token, attrs) do - url = Fetcher.get_full_url(base_url, Endpoints.bucket_path()) - headers = Fetcher.apply_headers(api_key, token) - - url - |> Fetcher.post(attrs, headers) - |> case do - {:ok, resp} -> {:ok, resp} - {:error, msg} -> {:error, msg} - end - end - - @spec update(Conn.base_url(), Conn.api_key(), Conn.access_token(), bucket_id, update_attrs) :: - {:ok, Bucket.t()} | {:error, String.t()} - def update(base_url, api_key, token, id, attrs) do - url = Fetcher.get_full_url(base_url, Endpoints.bucket_path_with_id(id)) - headers = Fetcher.apply_headers(api_key, token) - - url - |> Fetcher.put(attrs, headers) - |> case do - {:ok, message} -> {:ok, message} - {:error, msg} -> {:error, msg} - end - end - - @spec empty(Conn.base_url(), Conn.api_key(), Conn.access_token(), bucket_id) :: - {:ok, :successfully_emptied} | {:error, String.t()} - def empty(base_url, api_key, token, id) do - url = Fetcher.get_full_url(base_url, Endpoints.bucket_path_to_empty(id)) - headers = Fetcher.apply_headers(api_key, token) - - url - |> Fetcher.post(nil, headers) - |> case do - {:ok, _message} -> {:ok, :successfully_emptied} - {:error, msg} -> {:error, msg} - end - end - - @spec delete(Conn.base_url(), Conn.api_key(), Conn.access_token(), bucket_id) :: - {:ok, String.t()} | {:error, String.t()} - def delete(base_url, api_key, token, id) do - url = Fetcher.get_full_url(base_url, Endpoints.bucket_path_with_id(id)) - headers = Fetcher.apply_headers(api_key, token) - - url - |> Fetcher.delete(nil, headers) - |> case do - {:ok, body} -> {:ok, body} - {:error, msg} -> {:error, msg} - end - 
end -end diff --git a/apps/supabase_storage/lib/supabase/storage/handlers/object_handler.ex b/apps/supabase_storage/lib/supabase/storage/handlers/object_handler.ex deleted file mode 100644 index 111eda0..0000000 --- a/apps/supabase_storage/lib/supabase/storage/handlers/object_handler.ex +++ /dev/null @@ -1,226 +0,0 @@ -defmodule Supabase.Storage.ObjectHandler do - @moduledoc """ - A low-level API interface for managing objects within a Supabase bucket. - - ## Responsibilities - - - **File Management**: Create, move, copy, and get information about files in a bucket. - - **Object Listing**: List objects based on certain criteria, like a prefix. - - **Object Removal**: Delete specific objects or a list of objects. - - **URL Management**: Generate signed URLs for granting temporary access to objects. - - **Content Access**: Retrieve the content of an object or stream it. - - ## Usage Warning - - This module is meant for internal use or for developers requiring more control over object management in Supabase. In general, users should work with the higher-level Supabase.Storage API when possible, as it may offer better abstractions and safety mechanisms. - - Directly interfacing with this module bypasses any additional logic the main API might provide. Use it with caution and ensure you understand its operations. - """ - - alias Supabase.Connection, as: Conn - alias Supabase.Fetcher - alias Supabase.Storage.Endpoints - alias Supabase.Storage.Object - alias Supabase.Storage.ObjectOptions, as: Opts - alias Supabase.Storage.SearchOptions, as: Search - - @type bucket_name :: String.t() - @type object_path :: Path.t() - @type file_path :: Path.t() - @type opts :: Opts.t() - @type search_opts :: Search.t() - @type wildcard :: String.t() - @type prefix :: String.t() - - @spec create_file( - Conn.base_url(), - Conn.api_key(), - Conn.access_token(), - bucket_name, - object_path, - file_path, - opts - ) :: - {:ok, Object.t()} | {:error, String.t()} - def create_file(url, api_key, token, bucket, object_path, file_path, %Opts{} = opts) do - url = Fetcher.get_full_url(url, Endpoints.file_upload(bucket, object_path)) - - headers = - Fetcher.apply_headers(api_key, token, [ - {"cache-control", "max-age=#{opts.cache_control}"}, - {"content-type", opts.content_type}, - {"x-upsert", to_string(opts.upsert)} - ]) - - Fetcher.upload(:post, url, file_path, headers) - rescue - File.Error -> {:error, :file_not_found} - end - - @spec move( - Conn.base_url(), - Conn.api_key(), - Conn.access_token(), - bucket_name, - object_path, - object_path - ) :: - {:ok, :moved} | {:error, String.t()} - def move(base_url, api_key, token, bucket_id, path, to) do - url = Fetcher.get_full_url(base_url, Endpoints.file_move()) - headers = Fetcher.apply_headers(api_key, token) - body = %{bucket_id: bucket_id, source_key: path, destination_key: to} - - url - |> Fetcher.post(body, headers) - |> case do - {:ok, _} -> {:ok, :moved} - {:error, msg} -> {:error, msg} - end - end - - @spec copy( - Conn.base_url(), - Conn.api_key(), - Conn.access_token(), - bucket_name, - object_path, - object_path - ) :: - {:ok, :copied} | {:error, String.t()} - def copy(base_url, api_key, token, bucket_id, path, to) do - url = Fetcher.get_full_url(base_url, Endpoints.file_copy()) - headers = Fetcher.apply_headers(api_key, token) - body = %{bucket_id: bucket_id, source_key: path, destination_key: to} - - url - |> Fetcher.post(body, headers) - |> case do - {:ok, _} -> {:ok, :copied} - {:error, msg} -> {:error, msg} - end - end - - @spec get_info( - Conn.base_url(), 
- Conn.api_key(), - Conn.access_token(), - bucket_name, - wildcard - ) :: - {:ok, Object.t()} | {:error, String.t()} - def get_info(base_url, api_key, token, bucket_name, wildcard) do - url = Fetcher.get_full_url(base_url, Endpoints.file_info(bucket_name, wildcard)) - headers = Fetcher.apply_headers(api_key, token) - - url - |> Fetcher.get(headers) - |> case do - {:ok, data} -> {:ok, Object.parse!(data)} - {:error, msg} -> {:error, msg} - end - end - - @spec list( - Conn.base_url(), - Conn.api_key(), - Conn.access_token(), - bucket_name, - prefix, - search_opts - ) :: - {:ok, [Object.t()]} | {:error, String.t()} - def list(base_url, api_key, token, bucket_name, prefix, %Search{} = opts) do - url = Fetcher.get_full_url(base_url, Endpoints.file_list(bucket_name)) - headers = Fetcher.apply_headers(api_key, token) - body = Map.merge(%{prefix: prefix}, Map.from_struct(opts)) - - url - |> Fetcher.post(body, headers) - |> case do - {:ok, data} -> {:ok, Enum.map(data, &Object.parse!/1)} - {:error, msg} -> {:error, msg} - end - end - - @spec remove( - Conn.base_url(), - Conn.api_key(), - Conn.access_token(), - bucket_name, - object_path - ) :: - {:ok, :deleted} | {:error, String.t()} - def remove(base_url, api_key, token, bucket_name, path) do - remove_list(base_url, api_key, token, bucket_name, [path]) - end - - @spec remove_list( - Conn.base_url(), - Conn.api_key(), - Conn.access_token(), - bucket_name, - list(object_path) - ) :: - {:ok, :deleted} | {:error, String.t()} - def remove_list(base_url, api_key, token, bucket_name, paths) do - url = Fetcher.get_full_url(base_url, Endpoints.file_remove(bucket_name)) - headers = Fetcher.apply_headers(api_key, token) - - url - |> Fetcher.delete(%{prefixes: paths}, headers) - |> case do - {:ok, _} -> {:ok, :deleted} - {:error, msg} -> {:error, msg} - end - end - - @spec create_signed_url( - Conn.base_url(), - Conn.api_key(), - Conn.access_token(), - bucket_name, - object_path, - integer - ) :: - {:ok, String.t()} | {:error, String.t()} - def create_signed_url(base_url, api_key, token, bucket_name, path, expires_in) do - url = Fetcher.get_full_url(base_url, Endpoints.file_signed_url(bucket_name, path)) - headers = Fetcher.apply_headers(api_key, token) - - url - |> Fetcher.post(%{expiresIn: expires_in}, headers) - |> case do - {:ok, data} -> {:ok, data["signedURL"]} - {:error, msg} -> {:error, msg} - end - end - - @spec get(Conn.base_url(), Conn.api_key(), Conn.access_token(), bucket_name, object_path) :: - {:ok, binary} | {:error, String.t()} - def get(base_url, api_key, token, bucket_name, wildcard) do - url = Fetcher.get_full_url(base_url, Endpoints.file_download(bucket_name, wildcard)) - headers = Fetcher.apply_headers(api_key, token) - - url - |> Fetcher.get(headers) - |> case do - {:ok, data} -> {:ok, data} - {:error, msg} -> {:error, msg} - end - end - - @spec get_lazy( - Conn.base_url(), - Conn.api_key(), - Conn.access_token(), - bucket_name, - wildcard - ) :: - {:ok, Stream.t()} | {:error, atom} - def get_lazy(base_url, api_key, token, bucket_name, wildcard) do - url = Fetcher.get_full_url(base_url, Endpoints.file_download(bucket_name, wildcard)) - headers = Fetcher.apply_headers(api_key, token) - Fetcher.stream(url, headers) - end -end diff --git a/apps/supabase_storage/lib/supabase/storage/object.ex b/apps/supabase_storage/lib/supabase/storage/object.ex deleted file mode 100644 index 3ca142d..0000000 --- a/apps/supabase_storage/lib/supabase/storage/object.ex +++ /dev/null @@ -1,75 +0,0 @@ -defmodule Supabase.Storage.Object do - @moduledoc """ - 
Represents an Object within a Supabase Storage Bucket. - - This module encapsulates the structure and operations related to an object or file stored within a Supabase Storage bucket. - - ## Structure - - An `Object` has the following attributes: - - - `id`: The unique identifier for the object. - - `path`: The path to the object within its storage bucket. - - `bucket_id`: The ID of the bucket that houses this object. - - `name`: The name or title of the object. - - `owner`: The owner or uploader of the object. - - `metadata`: A map containing meta-information about the object (e.g., file type, size). - - `created_at`: Timestamp indicating when the object was first uploaded or created. - - `updated_at`: Timestamp indicating the last time the object was updated. - - `last_accessed_at`: Timestamp of when the object was last accessed or retrieved. - - ## Functions - - - `parse!/1`: Accepts a map of attributes and constructs a structured `Object`. - - ## Examples - - ### Parsing an object - - object_attrs = %{ - id: "obj_id", - path: "/folder/my_file.txt", - bucket_id: "bucket_123", - ... - } - Supabase.Storage.Object.parse!(object_attrs) - """ - - use Ecto.Schema - - import Ecto.Changeset, only: [cast: 3, apply_action!: 2] - - @type t :: %__MODULE__{ - id: String.t(), - path: Path.t(), - bucket_id: String.t(), - name: String.t(), - owner: String.t(), - metadata: map(), - created_at: NaiveDateTime.t(), - updated_at: NaiveDateTime.t(), - last_accessed_at: NaiveDateTime.t() - } - - @fields ~w(id path bucket_id name owner created_at updated_at metadata last_accessed_at)a - - @primary_key false - embedded_schema do - field(:path, :string) - field(:id, :string) - field(:bucket_id, :string) - field(:name, :string) - field(:owner, :string) - field(:metadata, :map) - field(:created_at, :naive_datetime) - field(:updated_at, :naive_datetime) - field(:last_accessed_at, :naive_datetime) - end - - @spec parse!(map) :: t - def parse!(attrs) do - %__MODULE__{} - |> cast(attrs, @fields) - |> apply_action!(:parse) - end -end diff --git a/apps/supabase_storage/lib/supabase/storage/object_options.ex b/apps/supabase_storage/lib/supabase/storage/object_options.ex deleted file mode 100644 index 253c506..0000000 --- a/apps/supabase_storage/lib/supabase/storage/object_options.ex +++ /dev/null @@ -1,57 +0,0 @@ -defmodule Supabase.Storage.ObjectOptions do - @moduledoc """ - Represents the configurable options for an Object within Supabase Storage. - - This module encapsulates options that can be set or modified for a storage object. These options help in controlling behavior such as caching, content type, and whether to upsert an object. - - ## Structure - - An `ObjectOptions` consists of the following attributes: - - - `cache_control`: Specifies directives for caching mechanisms in both requests and responses. Default is `"3600"`. - - `content_type`: Specifies the media type of the resource or data. Default is `"text/plain;charset=UTF-8"`. - - `upsert`: A boolean that, when set to `true`, will insert the object if it does not exist, or update it if it does. Default is `true`. - - ## Functions - - - `parse!/1`: Accepts a map of attributes and constructs a structured `ObjectOptions`. 
- - ## Examples - - ### Parsing object options - - options_attrs = %{ - cache_control: "no-cache", - content_type: "application/json", - upsert: false - } - Supabase.Storage.ObjectOptions.parse!(options_attrs) - """ - - use Ecto.Schema - - import Ecto.Changeset, only: [cast: 3, apply_action!: 2] - - @type t :: %__MODULE__{ - cache_control: String.t(), - content_type: String.t(), - upsert: boolean() - } - - @fields ~w(cache_control content_type upsert)a - - @derive Jason.Encoder - @primary_key false - embedded_schema do - field(:cache_control, :string, default: "3600") - field(:content_type, :string, default: "text/plain;charset=UTF-8") - field(:upsert, :boolean, default: true) - end - - @spec parse!(map) :: t - def parse!(attrs) do - %__MODULE__{} - |> cast(attrs, @fields) - |> apply_action!(:parse) - end -end diff --git a/apps/supabase_storage/lib/supabase/storage/search_options.ex b/apps/supabase_storage/lib/supabase/storage/search_options.ex deleted file mode 100644 index 25f8339..0000000 --- a/apps/supabase_storage/lib/supabase/storage/search_options.ex +++ /dev/null @@ -1,60 +0,0 @@ -defmodule Supabase.Storage.SearchOptions do - @moduledoc """ - Represents the search options for querying objects within Supabase Storage. - - This module encapsulates various options that aid in fetching and sorting storage objects. These options include specifying the limit on the number of results, an offset for pagination, and a sorting directive. - - ## Structure - - A `SearchOptions` consists of the following attributes: - - - `limit`: Specifies the maximum number of results to return. Default is `100`. - - `offset`: Specifies the number of results to skip before starting to fetch the result set. Useful for implementing pagination. Default is `0`. - - `sort_by`: A map that provides a sorting directive. It defines which column should be used for sorting and the order (ascending or descending). Default is `%{column: "name", order: "asc"}`. - - ## Functions - - - `parse!/1`: Accepts a map of attributes and constructs a structured `SearchOptions`. 
- - ## Examples - - ### Parsing search options - - search_attrs = %{ - limit: 50, - offset: 10, - sort_by: %{column: "created_at", order: "desc"} - } - Supabase.Storage.SearchOptions.parse!(search_attrs) - """ - - use Ecto.Schema - - import Ecto.Changeset, only: [cast: 3, apply_action!: 2] - - @type t :: %__MODULE__{ - limit: integer(), - offset: integer(), - sort_by: %{ - column: String.t(), - order: String.t() - } - } - - @fields ~w(limit offset sort_by)a - - @primary_key false - @derive Jason.Encoder - embedded_schema do - field(:limit, :integer, default: 100) - field(:offset, :integer, default: 0) - field(:sort_by, :map, default: %{column: "name", order: "asc"}) - end - - @spec parse!(map) :: t - def parse!(attrs) do - %__MODULE__{} - |> cast(attrs, @fields) - |> apply_action!(:parse) - end -end diff --git a/apps/supabase_storage/lib/supabase/storage_behaviour.ex b/apps/supabase_storage/lib/supabase/storage_behaviour.ex deleted file mode 100644 index 7dee4fb..0000000 --- a/apps/supabase_storage/lib/supabase/storage_behaviour.ex +++ /dev/null @@ -1,43 +0,0 @@ -defmodule Supabase.StorageBehaviour do - @moduledoc "Defines Supabase Storage Client callbacks" - - alias Supabase.Storage.Bucket - alias Supabase.Storage.Object - alias Supabase.Storage.ObjectOptions, as: Opts - alias Supabase.Storage.SearchOptions, as: Search - - @type conn :: atom - @type reason :: String.t() | atom - @type result(a) :: {:ok, a} | {:error, reason} | {:error, :invalid_client} - - @callback list_buckets(conn) :: result([Bucket.t()]) - @callback retrieve_bucket_info(conn, id) :: result(Bucket.t()) - when id: String.t() - @callback create_bucket(conn, map) :: result(Bucket.t()) - @callback update_bucket(conn, Bucket.t(), map) :: result(Bucket.t()) - @callback empty_bucket(conn, Bucket.t()) :: result(:emptied) - @callback delete_bucket(conn, Bucket.t()) :: result(:deleted) - - @callback remove_object(conn, Bucket.t(), Object.t()) :: result(:deleted) - @callback move_object(conn, Bucket.t(), Object.t(), String.t()) :: result(:moved) - @callback copy_object(conn, Bucket.t(), Object.t(), String.t()) :: result(:copied) - @callback retrieve_object_info(conn, Bucket.t(), String.t()) :: result(Object.t()) - @callback list_objects(conn, Bucket.t(), prefix, Search.t()) :: result([Object.t()]) - when prefix: String.t() - @callback upload_object(conn, Bucket.t(), dest, source, Opts.t()) :: result(Object.t()) - when dest: String.t(), - source: Path.t() - @callback download_object(conn, Bucket.t(), wildcard) :: result(binary) - when wildcard: String.t() - @callback download_object_lazy(conn, Bucket.t(), wildcard) :: result(Stream.t()) - when wildcard: String.t() - @callback save_object(conn, dest, Bucket.t(), wildcard) :: - :ok | {:error, atom} | {:error, :invalid_client} - when wildcard: String.t(), - dest: Path.t() - @callback save_object_stream(conn, dest, Bucket.t(), wildcard) :: - :ok | {:error, atom} | {:error, :invalid_client} - when wildcard: String.t(), - dest: Path.t() - @callback create_signed_url(conn, Bucket.t(), String.t(), integer) :: result(String.t()) -end diff --git a/apps/supabase_storage/mix.exs b/apps/supabase_storage/mix.exs deleted file mode 100644 index d63652f..0000000 --- a/apps/supabase_storage/mix.exs +++ /dev/null @@ -1,68 +0,0 @@ -defmodule Supabase.Storage.MixProject do - use Mix.Project - - @version "0.2.0" - @source_url "https://github.com/zoedsoupe/supabase_storage" - - def project do - [ - app: :supabase_storage, - version: @version, - elixir: "~> 1.15", - build_path: "../../_build", - 
deps_path: "../../deps", - lockfile: "../../mix.lock", - start_permanent: Mix.env() == :prod, - deps: deps(), - package: package(), - description: description(), - docs: docs() - ] - end - - def application do - [ - extra_applications: [:logger], - mod: {Supabase.Storage.Application, []} - ] - end - - defp deps do - [ - {:ecto, "~> 3.10"}, - {:supabase_potion, umbrella_dep(Mix.env())}, - {:ex_doc, ">= 0.0.0", runtime: false}, - {:credo, "~> 1.7", only: [:dev, :test], runtime: false}, - {:dialyxir, "~> 1.3", only: [:dev], runtime: false} - ] - end - - defp umbrella_dep(:prod), do: "~> 0.2" - defp umbrella_dep(_), do: [in_umbrella: true] - - defp package do - %{ - name: "supabase_storage", - licenses: ["MIT"], - contributors: ["zoedsoupe"], - links: %{ - "GitHub" => @source_url, - "Docs" => "https://hexdocs.pm/supabase_storage" - }, - files: ~w[lib mix.exs README.md LICENSE] - } - end - - defp docs do - [ - main: "Supabase.Storage", - extras: ["README.md"] - ] - end - - defp description do - """ - High level Elixir client for Supabase Storage. - """ - end -end diff --git a/apps/supabase_storage/test/test_helper.exs b/apps/supabase_storage/test/test_helper.exs deleted file mode 100644 index 869559e..0000000 --- a/apps/supabase_storage/test/test_helper.exs +++ /dev/null @@ -1 +0,0 @@ -ExUnit.start() diff --git a/flake.nix b/flake.nix index 8a43f09..72b0146 100644 --- a/flake.nix +++ b/flake.nix @@ -12,44 +12,8 @@ inherit (pkgs.beam.interpreters) erlang_26; inherit (pkgs.beam) packagesWith; beam-pkgs = packagesWith erlang_26; - deps = import ./nix/deps.nix { - inherit (pkgs) lib; - beamPackages = beam-pkgs; - }; }); in { - packages = for-all-systems ({ - beam-pkgs, - deps, - ... - }: rec { - default = supabase-potion; - supabase-potion = beam-pkgs.buildMix { - name = "supabase-potion"; - version = "v0.2.3"; - src = ./apps/supabase_potion; - beamDeps = with deps; [ecto jason finch]; - }; - supabase-storage = beam-pkgs.buildMix { - name = "supabase-storage"; - version = "v0.2.0"; - src = ./apps/supabase_storage; - beamDeps = with deps; [ecto supabase-potion]; - }; - supabase-auth = beam-pkgs.buildMix { - name = "supabase-auth"; - version = "v0.1.0"; - src = ./apps/supabase_auth; - beamDeps = with deps; [ecto plug supabase-potion]; - }; - supabase-postgrest = beam-pkgs.buildMix { - name = "supabase-postgrest"; - version = "v0.1.0"; - src = ./apps/supabase_postgrest; - beamDeps = with deps; [ecto supabase-potion]; - }; - }); - devShells = for-all-systems ({ pkgs, beam-pkgs, diff --git a/apps/supabase_potion/lib/supabase.ex b/lib/supabase.ex similarity index 100% rename from apps/supabase_potion/lib/supabase.ex rename to lib/supabase.ex diff --git a/apps/supabase_potion/lib/supabase/application.ex b/lib/supabase/application.ex similarity index 100% rename from apps/supabase_potion/lib/supabase/application.ex rename to lib/supabase/application.ex diff --git a/apps/supabase_potion/lib/supabase/client.ex b/lib/supabase/client.ex similarity index 100% rename from apps/supabase_potion/lib/supabase/client.ex rename to lib/supabase/client.ex diff --git a/apps/supabase_potion/lib/supabase/client/auth.ex b/lib/supabase/client/auth.ex similarity index 100% rename from apps/supabase_potion/lib/supabase/client/auth.ex rename to lib/supabase/client/auth.ex diff --git a/apps/supabase_potion/lib/supabase/client/conn.ex b/lib/supabase/client/conn.ex similarity index 100% rename from apps/supabase_potion/lib/supabase/client/conn.ex rename to lib/supabase/client/conn.ex diff --git 
a/apps/supabase_potion/lib/supabase/client/db.ex b/lib/supabase/client/db.ex similarity index 100% rename from apps/supabase_potion/lib/supabase/client/db.ex rename to lib/supabase/client/db.ex diff --git a/apps/supabase_potion/lib/supabase/client/global.ex b/lib/supabase/client/global.ex similarity index 100% rename from apps/supabase_potion/lib/supabase/client/global.ex rename to lib/supabase/client/global.ex diff --git a/apps/supabase_potion/lib/supabase/client_registry.ex b/lib/supabase/client_registry.ex similarity index 100% rename from apps/supabase_potion/lib/supabase/client_registry.ex rename to lib/supabase/client_registry.ex diff --git a/apps/supabase_potion/lib/supabase/client_supervisor.ex b/lib/supabase/client_supervisor.ex similarity index 100% rename from apps/supabase_potion/lib/supabase/client_supervisor.ex rename to lib/supabase/client_supervisor.ex diff --git a/apps/supabase_potion/lib/supabase/fetcher.ex b/lib/supabase/fetcher.ex similarity index 100% rename from apps/supabase_potion/lib/supabase/fetcher.ex rename to lib/supabase/fetcher.ex diff --git a/apps/supabase_potion/lib/supabase/fetcher_behaviour.ex b/lib/supabase/fetcher_behaviour.ex similarity index 100% rename from apps/supabase_potion/lib/supabase/fetcher_behaviour.ex rename to lib/supabase/fetcher_behaviour.ex diff --git a/apps/supabase_potion/lib/supabase/missing_supabase_config.ex b/lib/supabase/missing_supabase_config.ex similarity index 100% rename from apps/supabase_potion/lib/supabase/missing_supabase_config.ex rename to lib/supabase/missing_supabase_config.ex diff --git a/apps/supabase_potion/lib/supabase/types/atom.ex b/lib/supabase/types/atom.ex similarity index 100% rename from apps/supabase_potion/lib/supabase/types/atom.ex rename to lib/supabase/types/atom.ex diff --git a/mix.exs b/mix.exs index 4c63abf..7b9b685 100644 --- a/mix.exs +++ b/mix.exs @@ -1,16 +1,65 @@ -defmodule Supabase.MixProject do +defmodule Supabase.Potion.MixProject do use Mix.Project + @version "0.3.0" + @source_url "https://github.com/zoedsoupe/supabase" + def project do [ - name: :supabase, - apps_path: "apps", + app: :supabase_potion, + version: @version, + build_path: "../../_build", + deps_path: "../../deps", + lockfile: "../../mix.lock", + elixir: "~> 1.14", start_permanent: Mix.env() == :prod, - deps: deps() + deps: deps(), + docs: docs(), + package: package(), + description: description() + ] + end + + def application do + [ + mod: {Supabase.Application, []}, + extra_applications: [:logger] ] end defp deps do - [] + [ + {:finch, "~> 0.16"}, + {:jason, "~> 1.4"}, + {:ecto, "~> 3.10"}, + {:ex_doc, ">= 0.0.0", runtime: false, only: [:dev, :prod]}, + {:credo, "~> 1.7", only: [:dev, :test], runtime: false}, + {:dialyxir, "~> 1.3", only: [:dev], runtime: false} + ] + end + + defp package do + %{ + licenses: ["MIT"], + contributors: ["zoedsoupe"], + links: %{ + "GitHub" => @source_url, + "Docs" => "https://hexdocs.pm/supabase_potion" + }, + files: ~w[lib mix.exs README.md LICENSE] + } + end + + defp docs do + [ + main: "Supabase", + extras: ["README.md"] + ] + end + + defp description do + """ + Complete Elixir client for Supabase. 
+ """ end end diff --git a/mix.lock b/mix.lock index ef2853a..675977b 100644 --- a/mix.lock +++ b/mix.lock @@ -1,12 +1,12 @@ %{ "bunt": {:hex, :bunt, "0.2.1", "e2d4792f7bc0ced7583ab54922808919518d0e57ee162901a16a1b6664ef3b14", [:mix], [], "hexpm", "a330bfb4245239787b15005e66ae6845c9cd524a288f0d141c148b02603777a5"}, - "castore": {:hex, :castore, "1.0.4", "ff4d0fb2e6411c0479b1d965a814ea6d00e51eb2f58697446e9c41a97d940b28", [:mix], [], "hexpm", "9418c1b8144e11656f0be99943db4caf04612e3eaecefb5dae9a2a87565584f8"}, + "castore": {:hex, :castore, "1.0.5", "9eeebb394cc9a0f3ae56b813459f990abb0a3dedee1be6b27fdb50301930502f", [:mix], [], "hexpm", "8d7c597c3e4a64c395980882d4bca3cebb8d74197c590dc272cfd3b6a6310578"}, "credo": {:hex, :credo, "1.7.0", "6119bee47272e85995598ee04f2ebbed3e947678dee048d10b5feca139435f75", [:mix], [{:bunt, "~> 0.2.1", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2.8", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "6839fcf63d1f0d1c0f450abc8564a57c43d644077ab96f2934563e68b8a769d7"}, "db_connection": {:hex, :db_connection, "2.5.0", "bb6d4f30d35ded97b29fe80d8bd6f928a1912ca1ff110831edcd238a1973652c", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "c92d5ba26cd69ead1ff7582dbb860adeedfff39774105a4f1c92cbb654b55aa2"}, "decimal": {:hex, :decimal, "2.1.1", "5611dca5d4b2c3dd497dec8f68751f1f1a54755e8ed2a966c2633cf885973ad6", [:mix], [], "hexpm", "53cfe5f497ed0e7771ae1a475575603d77425099ba5faef9394932b35020ffcc"}, "dialyxir": {:hex, :dialyxir, "1.4.1", "a22ed1e7bd3a3e3f197b68d806ef66acb61ee8f57b3ac85fc5d57354c5482a93", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "84b795d6d7796297cca5a3118444b80c7d94f7ce247d49886e7c291e1ae49801"}, "earmark_parser": {:hex, :earmark_parser, "1.4.35", "437773ca9384edf69830e26e9e7b2e0d22d2596c4a6b17094a3b29f01ea65bb8", [:mix], [], "hexpm", "8652ba3cb85608d0d7aa2d21b45c6fad4ddc9a1f9a1f1b30ca3a246f0acc33f6"}, - "ecto": {:hex, :ecto, "3.11.0", "ff8614b4e70a774f9d39af809c426def80852048440e8785d93a6e91f48fec00", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7769dad267ef967310d6e988e92d772659b11b09a0c015f101ce0fff81ce1f81"}, + "ecto": {:hex, :ecto, "3.11.1", "4b4972b717e7ca83d30121b12998f5fcdc62ba0ed4f20fd390f16f3270d85c3e", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "ebd3d3772cd0dfcd8d772659e41ed527c28b2a8bde4b00fe03e0463da0f1983b"}, "ecto_sql": {:hex, :ecto_sql, "3.10.2", "6b98b46534b5c2f8b8b5f03f126e75e2a73c64f3c071149d32987a5378b0fdbd", [:mix], [{:db_connection, "~> 2.4.1 or ~> 2.5", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.10.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.6.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.16.0 or ~> 0.17.0 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", 
"68c018debca57cb9235e3889affdaec7a10616a4e3a80c99fa1d01fdafaa9007"}, "erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"}, "ex_doc": {:hex, :ex_doc, "0.30.6", "5f8b54854b240a2b55c9734c4b1d0dd7bdd41f71a095d42a70445c03cf05a281", [:mix], [{:earmark_parser, "~> 1.4.31", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1", [hex: :makeup_erlang, repo: "hexpm", optional: false]}], "hexpm", "bd48f2ddacf4e482c727f9293d9498e0881597eae6ddc3d9562bd7923375109f"}, @@ -18,9 +18,9 @@ "makeup_elixir": {:hex, :makeup_elixir, "0.16.1", "cc9e3ca312f1cfeccc572b37a09980287e243648108384b97ff2b76e505c3555", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "e127a341ad1b209bd80f7bd1620a15693a9908ed780c3b763bccf7d200c767c6"}, "makeup_erlang": {:hex, :makeup_erlang, "0.1.2", "ad87296a092a46e03b7e9b0be7631ddcf64c790fa68a9ef5323b6cbb36affc72", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "f3f5a1ca93ce6e092d92b6d9c049bcda58a3b617a8d888f8e7231c85630e8108"}, "mime": {:hex, :mime, "2.0.5", "dc34c8efd439abe6ae0343edbb8556f4d63f178594894720607772a041b04b02", [:mix], [], "hexpm", "da0d64a365c45bc9935cc5c8a7fc5e49a0e0f9932a761c55d6c52b142780a05c"}, - "mint": {:hex, :mint, "1.5.1", "8db5239e56738552d85af398798c80648db0e90f343c8469f6c6d8898944fb6f", [:mix], [{:castore, "~> 0.1.0 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:hpax, "~> 0.1.1", [hex: :hpax, repo: "hexpm", optional: false]}], "hexpm", "4a63e1e76a7c3956abd2c72f370a0d0aecddc3976dea5c27eccbecfa5e7d5b1e"}, + "mint": {:hex, :mint, "1.5.2", "4805e059f96028948870d23d7783613b7e6b0e2fb4e98d720383852a760067fd", [:mix], [{:castore, "~> 0.1.0 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:hpax, "~> 0.1.1", [hex: :hpax, repo: "hexpm", optional: false]}], "hexpm", "d77d9e9ce4eb35941907f1d3df38d8f750c357865353e21d335bdcdf6d892a02"}, "multipart": {:hex, :multipart, "0.1.1", "952c6aeb41c762d1ea9776c891754cfb61962c9d7b0f84fb63454779910b379d", [:mix], [{:mime, "~> 1.2", [hex: :mime, repo: "hexpm", optional: false]}], "hexpm", "bc349da107810c220ef0366724e445a1a2a39e6be3a361c6a141e0d507eee157"}, - "nimble_options": {:hex, :nimble_options, "1.0.2", "92098a74df0072ff37d0c12ace58574d26880e522c22801437151a159392270e", [:mix], [], "hexpm", "fd12a8db2021036ce12a309f26f564ec367373265b53e25403f0ee697380f1b8"}, + "nimble_options": {:hex, :nimble_options, "1.1.0", "3b31a57ede9cb1502071fade751ab0c7b8dbe75a9a4c2b5bbb0943a690b63172", [:mix], [], "hexpm", "8bbbb3941af3ca9acc7835f5655ea062111c9c27bcac53e004460dfd19008a99"}, "nimble_parsec": {:hex, :nimble_parsec, "1.3.1", "2c54013ecf170e249e9291ed0a62e5832f70a476c61da16f6aac6dca0189f2af", [:mix], [], "hexpm", "2682e3c0b2eb58d90c6375fc0cc30bc7be06f365bf72608804fb9cffa5e1b167"}, "nimble_pool": {:hex, :nimble_pool, "1.0.0", "5eb82705d138f4dd4423f69ceb19ac667b3b492ae570c9f5c900bb3d2f50a847", [:mix], [], "hexpm", "80be3b882d2d351882256087078e1b1952a28bf98d0a287be87e4a24a710b67a"}, "plug": {:hex, :plug, "1.15.2", "94cf1fa375526f30ff8770837cb804798e0045fd97185f0bb9e5fcd858c792a3", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2 or ~> 
2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "02731fa0c2dcb03d8d21a1d941bdbbe99c2946c0db098eee31008e04c6283615"}, diff --git a/nix/deps.nix b/nix/deps.nix deleted file mode 100644 index c621466..0000000 --- a/nix/deps.nix +++ /dev/null @@ -1,428 +0,0 @@ -{ lib, beamPackages, overrides ? (x: y: {}) }: - -let - buildRebar3 = lib.makeOverridable beamPackages.buildRebar3; - buildMix = lib.makeOverridable beamPackages.buildMix; - buildErlangMk = lib.makeOverridable beamPackages.buildErlangMk; - - self = packages // (overrides self packages); - - packages = with beamPackages; with self; { - bunt = buildMix rec { - name = "bunt"; - version = "0.2.1"; - - src = fetchHex { - pkg = "bunt"; - version = "${version}"; - sha256 = "a330bfb4245239787b15005e66ae6845c9cd524a288f0d141c148b02603777a5"; - }; - - beamDeps = []; - }; - - castore = buildMix rec { - name = "castore"; - version = "1.0.4"; - - src = fetchHex { - pkg = "castore"; - version = "${version}"; - sha256 = "9418c1b8144e11656f0be99943db4caf04612e3eaecefb5dae9a2a87565584f8"; - }; - - beamDeps = []; - }; - - credo = buildMix rec { - name = "credo"; - version = "1.7.0"; - - src = fetchHex { - pkg = "credo"; - version = "${version}"; - sha256 = "6839fcf63d1f0d1c0f450abc8564a57c43d644077ab96f2934563e68b8a769d7"; - }; - - beamDeps = [ bunt file_system jason ]; - }; - - db_connection = buildMix rec { - name = "db_connection"; - version = "2.5.0"; - - src = fetchHex { - pkg = "db_connection"; - version = "${version}"; - sha256 = "c92d5ba26cd69ead1ff7582dbb860adeedfff39774105a4f1c92cbb654b55aa2"; - }; - - beamDeps = [ telemetry ]; - }; - - decimal = buildMix rec { - name = "decimal"; - version = "2.1.1"; - - src = fetchHex { - pkg = "decimal"; - version = "${version}"; - sha256 = "53cfe5f497ed0e7771ae1a475575603d77425099ba5faef9394932b35020ffcc"; - }; - - beamDeps = []; - }; - - dialyxir = buildMix rec { - name = "dialyxir"; - version = "1.4.1"; - - src = fetchHex { - pkg = "dialyxir"; - version = "${version}"; - sha256 = "84b795d6d7796297cca5a3118444b80c7d94f7ce247d49886e7c291e1ae49801"; - }; - - beamDeps = [ erlex ]; - }; - - earmark_parser = buildMix rec { - name = "earmark_parser"; - version = "1.4.35"; - - src = fetchHex { - pkg = "earmark_parser"; - version = "${version}"; - sha256 = "8652ba3cb85608d0d7aa2d21b45c6fad4ddc9a1f9a1f1b30ca3a246f0acc33f6"; - }; - - beamDeps = []; - }; - - ecto = buildMix rec { - name = "ecto"; - version = "3.11.0"; - - src = fetchHex { - pkg = "ecto"; - version = "${version}"; - sha256 = "7769dad267ef967310d6e988e92d772659b11b09a0c015f101ce0fff81ce1f81"; - }; - - beamDeps = [ decimal jason telemetry ]; - }; - - ecto_sql = buildMix rec { - name = "ecto_sql"; - version = "3.10.2"; - - src = fetchHex { - pkg = "ecto_sql"; - version = "${version}"; - sha256 = "68c018debca57cb9235e3889affdaec7a10616a4e3a80c99fa1d01fdafaa9007"; - }; - - beamDeps = [ db_connection ecto postgrex telemetry ]; - }; - - erlex = buildMix rec { - name = "erlex"; - version = "0.2.6"; - - src = fetchHex { - pkg = "erlex"; - version = "${version}"; - sha256 = "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"; - }; - - beamDeps = []; - }; - - ex_doc = buildMix rec { - name = "ex_doc"; - version = "0.30.6"; - - src = fetchHex { - pkg = "ex_doc"; - version = "${version}"; - sha256 = "bd48f2ddacf4e482c727f9293d9498e0881597eae6ddc3d9562bd7923375109f"; - }; - - beamDeps = [ earmark_parser makeup_elixir makeup_erlang ]; - }; - - 
file_system = buildMix rec { - name = "file_system"; - version = "0.2.10"; - - src = fetchHex { - pkg = "file_system"; - version = "${version}"; - sha256 = "41195edbfb562a593726eda3b3e8b103a309b733ad25f3d642ba49696bf715dc"; - }; - - beamDeps = []; - }; - - finch = buildMix rec { - name = "finch"; - version = "0.16.0"; - - src = fetchHex { - pkg = "finch"; - version = "${version}"; - sha256 = "f660174c4d519e5fec629016054d60edd822cdfe2b7270836739ac2f97735ec5"; - }; - - beamDeps = [ castore mime mint nimble_options nimble_pool telemetry ]; - }; - - hpax = buildMix rec { - name = "hpax"; - version = "0.1.2"; - - src = fetchHex { - pkg = "hpax"; - version = "${version}"; - sha256 = "2c87843d5a23f5f16748ebe77969880e29809580efdaccd615cd3bed628a8c13"; - }; - - beamDeps = []; - }; - - jason = buildMix rec { - name = "jason"; - version = "1.4.1"; - - src = fetchHex { - pkg = "jason"; - version = "${version}"; - sha256 = "fbb01ecdfd565b56261302f7e1fcc27c4fb8f32d56eab74db621fc154604a7a1"; - }; - - beamDeps = [ decimal ]; - }; - - makeup = buildMix rec { - name = "makeup"; - version = "1.1.0"; - - src = fetchHex { - pkg = "makeup"; - version = "${version}"; - sha256 = "0a45ed501f4a8897f580eabf99a2e5234ea3e75a4373c8a52824f6e873be57a6"; - }; - - beamDeps = [ nimble_parsec ]; - }; - - makeup_elixir = buildMix rec { - name = "makeup_elixir"; - version = "0.16.1"; - - src = fetchHex { - pkg = "makeup_elixir"; - version = "${version}"; - sha256 = "e127a341ad1b209bd80f7bd1620a15693a9908ed780c3b763bccf7d200c767c6"; - }; - - beamDeps = [ makeup nimble_parsec ]; - }; - - makeup_erlang = buildMix rec { - name = "makeup_erlang"; - version = "0.1.2"; - - src = fetchHex { - pkg = "makeup_erlang"; - version = "${version}"; - sha256 = "f3f5a1ca93ce6e092d92b6d9c049bcda58a3b617a8d888f8e7231c85630e8108"; - }; - - beamDeps = [ makeup ]; - }; - - mime = buildMix rec { - name = "mime"; - version = "2.0.5"; - - src = fetchHex { - pkg = "mime"; - version = "${version}"; - sha256 = "da0d64a365c45bc9935cc5c8a7fc5e49a0e0f9932a761c55d6c52b142780a05c"; - }; - - beamDeps = []; - }; - - mint = buildMix rec { - name = "mint"; - version = "1.5.1"; - - src = fetchHex { - pkg = "mint"; - version = "${version}"; - sha256 = "4a63e1e76a7c3956abd2c72f370a0d0aecddc3976dea5c27eccbecfa5e7d5b1e"; - }; - - beamDeps = [ castore hpax ]; - }; - - multipart = buildMix rec { - name = "multipart"; - version = "0.1.1"; - - src = fetchHex { - pkg = "multipart"; - version = "${version}"; - sha256 = "bc349da107810c220ef0366724e445a1a2a39e6be3a361c6a141e0d507eee157"; - }; - - beamDeps = [ mime ]; - }; - - nimble_options = buildMix rec { - name = "nimble_options"; - version = "1.0.2"; - - src = fetchHex { - pkg = "nimble_options"; - version = "${version}"; - sha256 = "fd12a8db2021036ce12a309f26f564ec367373265b53e25403f0ee697380f1b8"; - }; - - beamDeps = []; - }; - - nimble_parsec = buildMix rec { - name = "nimble_parsec"; - version = "1.3.1"; - - src = fetchHex { - pkg = "nimble_parsec"; - version = "${version}"; - sha256 = "2682e3c0b2eb58d90c6375fc0cc30bc7be06f365bf72608804fb9cffa5e1b167"; - }; - - beamDeps = []; - }; - - nimble_pool = buildMix rec { - name = "nimble_pool"; - version = "1.0.0"; - - src = fetchHex { - pkg = "nimble_pool"; - version = "${version}"; - sha256 = "80be3b882d2d351882256087078e1b1952a28bf98d0a287be87e4a24a710b67a"; - }; - - beamDeps = []; - }; - - plug = buildMix rec { - name = "plug"; - version = "1.15.2"; - - src = fetchHex { - pkg = "plug"; - version = "${version}"; - sha256 = 
"02731fa0c2dcb03d8d21a1d941bdbbe99c2946c0db098eee31008e04c6283615"; - }; - - beamDeps = [ mime plug_crypto telemetry ]; - }; - - plug_crypto = buildMix rec { - name = "plug_crypto"; - version = "2.0.0"; - - src = fetchHex { - pkg = "plug_crypto"; - version = "${version}"; - sha256 = "53695bae57cc4e54566d993eb01074e4d894b65a3766f1c43e2c61a1b0f45ea9"; - }; - - beamDeps = []; - }; - - postgrex = buildMix rec { - name = "postgrex"; - version = "0.17.3"; - - src = fetchHex { - pkg = "postgrex"; - version = "${version}"; - sha256 = "946cf46935a4fdca7a81448be76ba3503cff082df42c6ec1ff16a4bdfbfb098d"; - }; - - beamDeps = [ db_connection decimal jason ]; - }; - - supabase_connection = buildMix rec { - name = "supabase_connection"; - version = "0.1.0"; - - src = fetchHex { - pkg = "supabase_connection"; - version = "${version}"; - sha256 = "5435f2892d13c5f00d26b4a61b3fc823683fc6699936d9b7c201ebf73c33e226"; - }; - - beamDeps = [ ecto supabase_types ]; - }; - - supabase_fetcher = buildMix rec { - name = "supabase_fetcher"; - version = "0.1.0"; - - src = fetchHex { - pkg = "supabase_fetcher"; - version = "${version}"; - sha256 = "33725892d1fb51c4d6aca49a6142fa0ef433fd5f20a17113c469f80e090d5c5f"; - }; - - beamDeps = [ finch jason ]; - }; - - supabase_storage = buildMix rec { - name = "supabase_storage"; - version = "0.1.0"; - - src = fetchHex { - pkg = "supabase_storage"; - version = "${version}"; - sha256 = "4b8343f8b0c39633bcf8ae7a82b4c92ab22cac66f67bf7bbb2cb948072c192e9"; - }; - - beamDeps = [ ecto supabase_connection supabase_fetcher ]; - }; - - supabase_types = buildMix rec { - name = "supabase_types"; - version = "0.1.1"; - - src = fetchHex { - pkg = "supabase_types"; - version = "${version}"; - sha256 = "a8cc84753fdd160f4db4ea31a3c92b60c5efea2d6153a11da19e02943433e42f"; - }; - - beamDeps = [ ecto ]; - }; - - telemetry = buildRebar3 rec { - name = "telemetry"; - version = "1.2.1"; - - src = fetchHex { - pkg = "telemetry"; - version = "${version}"; - sha256 = "dad9ce9d8effc621708f99eac538ef1cbe05d6a874dd741de2e689c47feafed5"; - }; - - beamDeps = []; - }; - }; -in self - diff --git a/apps/supabase_potion/test/supabase_test.exs b/test/supabase_test.exs similarity index 100% rename from apps/supabase_potion/test/supabase_test.exs rename to test/supabase_test.exs diff --git a/apps/supabase_auth/test/test_helper.exs b/test/test_helper.exs similarity index 100% rename from apps/supabase_auth/test/test_helper.exs rename to test/test_helper.exs