diff --git a/.env b/.env index ba74042..ab29ada 100644 --- a/.env +++ b/.env @@ -1,5 +1,12 @@ +# Server env POSTGRES_USER="postgres" POSTGRES_PASSWORD="localpw" POSTGRES_URI="localhost" POSTGRES_PORT="5432" -POSTGRES_DB_NAME="forc_pub" \ No newline at end of file +POSTGRES_DB_NAME="forc_pub" + +# Local env +CORS_HTTP_ORIGIN="http://localhost:3000" + +# Diesel CLI env +DATABASE_URL="postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_URI}/${POSTGRES_DB_NAME}" \ No newline at end of file diff --git a/Cargo.lock b/Cargo.lock index 19dea3c..fb82881 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -438,11 +438,14 @@ dependencies = [ "dotenvy", "hex", "nanoid", + "rand", "regex", "reqwest", "rocket", "serde", "serde_json", + "serial_test", + "sha2", "thiserror", "tokio", "uuid", @@ -480,6 +483,7 @@ checksum = "38390104763dc37a5145a53c29c63c1290b5d316d6086ec32c293f6736051bb0" dependencies = [ "futures-channel", "futures-core", + "futures-executor", "futures-io", "futures-sink", "futures-task", @@ -502,6 +506,17 @@ version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "04909a7a7e4633ae6c4a9ab280aeb86da1236243a77b694a49eacd659a4bd3ac" +[[package]] +name = "futures-executor" +version = "0.3.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7acc85df6714c176ab5edf386123fafe217be88c0840ec11f199441134a074e2" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + [[package]] name = "futures-io" version = "0.3.25" @@ -1580,6 +1595,15 @@ version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde" +[[package]] +name = "scc" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec96560eea317a9cc4e0bb1f6a2c93c09a19b8c4fc5cb3fcc0ec1c094cd783e2" +dependencies = [ + "sdd", +] + [[package]] name = "schannel" version = "0.1.23" @@ -1620,6 +1644,12 @@ dependencies = [ "untrusted", ] +[[package]] +name = "sdd" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b84345e4c9bd703274a082fb80caaa99b7612be48dfaa1dd9266577ec412309d" + [[package]] name = "security-framework" version = "2.10.0" @@ -1695,11 +1725,36 @@ dependencies = [ "serde", ] +[[package]] +name = "serial_test" +version = "3.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b4b487fe2acf240a021cf57c6b2b4903b1e78ca0ecd862a71b71d2a51fed77d" +dependencies = [ + "futures", + "log", + "once_cell", + "parking_lot", + "scc", + "serial_test_derive", +] + +[[package]] +name = "serial_test_derive" +version = "3.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82fe9db325bcef1fbcde82e078a5cc4efdf787e96b3b9cf45b50b529f2083d67" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.57", +] + [[package]] name = "sha2" -version = "0.10.6" +version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82e6b795fe2e3b1e845bafcb27aa35405c4d47cdfc92af5fc8d3002f76cebdc0" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" dependencies = [ "cfg-if", "cpufeatures", diff --git a/Cargo.toml b/Cargo.toml index 72cd0b5..d128183 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -21,3 +21,6 @@ diesel = { version = "2.1.6", features = ["postgres", "uuid", "r2d2"] } dotenvy = "0.15" uuid = "1.8.0" diesel_migrations = "2.1.0" +rand = "0.8.5" +sha2 = "0.10.8" +serial_test = 
"3.1.1" diff --git a/app/package-lock.json b/app/package-lock.json index 8f9d421..f289321 100644 --- a/app/package-lock.json +++ b/app/package-lock.json @@ -20,10 +20,14 @@ "@types/node": "^16.18.91", "@types/react": "^18.2.67", "@types/react-dom": "^18.2.22", + "axios": "^1.6.8", "react": "^18.2.0", + "react-cookie": "^7.1.4", "react-dom": "^18.2.0", "react-router-dom": "^6.22.3", "react-scripts": "5.0.1", + "react-use-cookie": "^1.5.0", + "typed-axios-instance": "^3.3.1", "typescript": "^4.9.5", "usehooks-ts": "^3.0.2", "web-vitals": "^2.1.4" @@ -4520,6 +4524,11 @@ "@types/node": "*" } }, + "node_modules/@types/cookie": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/@types/cookie/-/cookie-0.6.0.tgz", + "integrity": "sha512-4Kh9a6B2bQciAhf7FSuMRRkUWecJgJu9nPnx3yzpsfXX/c50REIqpHY4C82bXP90qrLtXtkDxTZosYO3UpOwlA==" + }, "node_modules/@types/eslint": { "version": "8.56.6", "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-8.56.6.tgz", @@ -4573,6 +4582,15 @@ "@types/node": "*" } }, + "node_modules/@types/hoist-non-react-statics": { + "version": "3.3.5", + "resolved": "https://registry.npmjs.org/@types/hoist-non-react-statics/-/hoist-non-react-statics-3.3.5.tgz", + "integrity": "sha512-SbcrWzkKBw2cdwRTwQAswfpB9g9LJWfjtUeW/jvNwbhC8cpmmNYVePa+ncbUe0rGTQ7G3Ff6mYUN2VMfLVr+Sg==", + "dependencies": { + "@types/react": "*", + "hoist-non-react-statics": "^3.3.0" + } + }, "node_modules/@types/html-minifier-terser": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/@types/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz", @@ -5343,6 +5361,17 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/ansi-escapes/node_modules/type-fest": { + "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/ansi-html-community": { "version": "0.0.8", "resolved": "https://registry.npmjs.org/ansi-html-community/-/ansi-html-community-0.0.8.tgz", @@ -5678,6 +5707,29 @@ "node": ">=4" } }, + "node_modules/axios": { + "version": "1.6.8", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.8.tgz", + "integrity": "sha512-v/ZHtJDU39mDpyBoFVkETcd/uNdxrWRrg3bKpOKzXFA6Bvqopts6ALSMU3y6ijYxbw2B+wPrIv46egTzJXCLGQ==", + "dependencies": { + "follow-redirects": "^1.15.6", + "form-data": "^4.0.0", + "proxy-from-env": "^1.1.0" + } + }, + "node_modules/axios/node_modules/form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/axobject-query": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-3.2.1.tgz", @@ -15075,6 +15127,11 @@ "node": ">= 0.10" } }, + "node_modules/proxy-from-env": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==" + }, "node_modules/psl": { "version": "1.9.0", "resolved": "https://registry.npmjs.org/psl/-/psl-1.9.0.tgz", @@ -15224,6 
+15281,19 @@ "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz", "integrity": "sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg==" }, + "node_modules/react-cookie": { + "version": "7.1.4", + "resolved": "https://registry.npmjs.org/react-cookie/-/react-cookie-7.1.4.tgz", + "integrity": "sha512-wDxxa/HYaSXSMlyWJvJ5uZTzIVtQTPf1gMksFgwAz/2/W3lCtY8r4OChCXMPE7wax0PAdMY97UkNJedGv7KnDw==", + "dependencies": { + "@types/hoist-non-react-statics": "^3.3.5", + "hoist-non-react-statics": "^3.3.2", + "universal-cookie": "^7.0.0" + }, + "peerDependencies": { + "react": ">= 16.3.0" + } + }, "node_modules/react-dev-utils": { "version": "12.0.1", "resolved": "https://registry.npmjs.org/react-dev-utils/-/react-dev-utils-12.0.1.tgz", @@ -15488,6 +15558,18 @@ "react-dom": ">=16.6.0" } }, + "node_modules/react-use-cookie": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/react-use-cookie/-/react-use-cookie-1.5.0.tgz", + "integrity": "sha512-zPEAmAYbRLXzpi3VD3rjYHszTo8BonuiaiLH/jYixHr6qE+Yukm2lA6AsinX1uL7/9nFSVeKBLqI4oOZYdhghQ==", + "engines": { + "node": ">=8", + "npm": ">=5" + }, + "peerDependencies": { + "react": ">=16.8" + } + }, "node_modules/read-cache": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/read-cache/-/read-cache-1.0.0.tgz", @@ -17390,11 +17472,12 @@ } }, "node_modules/type-fest": { - "version": "0.21.3", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", - "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "version": "4.18.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.18.1.tgz", + "integrity": "sha512-qXhgeNsX15bM63h5aapNFcQid9jRF/l3ojDoDFmekDQEUufZ9U4ErVt6SjDxnHp48Ltrw616R8yNc3giJ3KvVQ==", + "peer": true, "engines": { - "node": ">=10" + "node": ">=16" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -17481,6 +17564,15 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/typed-axios-instance": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/typed-axios-instance/-/typed-axios-instance-3.3.1.tgz", + "integrity": "sha512-7psbeu3yncZZGJFduGXCuq0HIxLbUltPiOsInTa9Wo7k3K6kdZNd9Q/QKB61g6N4eVXiVT8Ey7WpS1aozXkniA==", + "peerDependencies": { + "axios": ">=1.0.0", + "type-fest": ">=3.0.0" + } + }, "node_modules/typedarray-to-buffer": { "version": "3.1.5", "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz", @@ -17567,6 +17659,15 @@ "node": ">=8" } }, + "node_modules/universal-cookie": { + "version": "7.1.4", + "resolved": "https://registry.npmjs.org/universal-cookie/-/universal-cookie-7.1.4.tgz", + "integrity": "sha512-Q+DVJsdykStWRMtXr2Pdj3EF98qZHUH/fXv/gwFz/unyToy1Ek1w5GsWt53Pf38tT8Gbcy5QNsj61Xe9TggP4g==", + "dependencies": { + "@types/cookie": "^0.6.0", + "cookie": "^0.6.0" + } + }, "node_modules/universalify": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", diff --git a/app/package.json b/app/package.json index 569de55..5021513 100644 --- a/app/package.json +++ b/app/package.json @@ -15,10 +15,13 @@ "@types/node": "^16.18.91", "@types/react": "^18.2.67", "@types/react-dom": "^18.2.22", + "axios": "^1.6.8", "react": "^18.2.0", "react-dom": "^18.2.0", "react-router-dom": "^6.22.3", "react-scripts": "5.0.1", + "react-use-cookie": "^1.5.0", + "typed-axios-instance": "^3.3.1", "typescript": "^4.9.5", "usehooks-ts": "^3.0.2", 
"web-vitals": "^2.1.4" diff --git a/app/src/features/tokens/components/CopyableToken.tsx b/app/src/features/tokens/components/CopyableToken.tsx new file mode 100644 index 0000000..5664f6d --- /dev/null +++ b/app/src/features/tokens/components/CopyableToken.tsx @@ -0,0 +1,43 @@ +import React from 'react'; +import IconButton from '@mui/material/IconButton'; +import ContentCopyIcon from '@mui/icons-material/ContentCopy'; + +export interface CopyableProps { + token: string; +} + +async function handleCopy(value: string) { + await navigator.clipboard.writeText(value); +} + +function CopyableToken({ token }: CopyableProps) { + return ( +
+
+
{token}
+
+
+ handleCopy(token)} aria-label='copy'> + + +
+
+  );
+}
+
+export default CopyableToken;
diff --git a/app/src/features/tokens/components/TokenCard.tsx b/app/src/features/tokens/components/TokenCard.tsx
new file mode 100644
index 0000000..86c0b7a
--- /dev/null
+++ b/app/src/features/tokens/components/TokenCard.tsx
@@ -0,0 +1,52 @@
+import React from 'react';
+import { Button } from '@mui/material';
+import { Token } from '../hooks/useApiTokens';
+import CopyableToken from './CopyableToken';
+
+export interface TokenCardProps {
+  token: Token;
+  handleRevoke: () => Promise<void>;
+}
+
+function TokenCard({ token, handleRevoke }: TokenCardProps) {
+  return (
+
+
+

{token.name}

+ + +
+
+ {`Created ${token.createdAt.toLocaleString()}`} +
+ {token.token && ( + <> +
+ { + 'Make sure to copy your API token now. You won’t be able to see it again!' + } +
+ + + + )} +
+ ); +} + +export default TokenCard; diff --git a/app/src/features/tokens/hooks/useApiTokens.ts b/app/src/features/tokens/hooks/useApiTokens.ts new file mode 100644 index 0000000..803f258 --- /dev/null +++ b/app/src/features/tokens/hooks/useApiTokens.ts @@ -0,0 +1,72 @@ +import { useCallback, useEffect, useState } from 'react'; +import { useGithubAuth } from '../../toolbar/hooks/useGithubAuth'; +import HTTP, { + CreateTokenResponse, + RawToken, +} from '../../../utils/http'; + +export interface Token { + id: string; + name: string; + token?: string; + createdAt: Date; +} + +function rawTokenToToken(rawToken: RawToken): Token { + return { + id: rawToken.id, + name: rawToken.name, + token: rawToken.token, + createdAt: new Date(rawToken.createdAt), + }; +} + +export function useApiTokens(): { + newToken: Token | null; + tokens: Token[]; + createToken: (name: string) => Promise; + revokeToken: (id: string) => Promise; +} { + const [githubUser] = useGithubAuth(); + const [tokens, setTokens] = useState([]); + const [newToken, setNewToken] = useState(null); + + const createToken = useCallback( + async (name: string) => { + const { data } = await HTTP.post(`/new_token`, { name }); + if (data.token) { + setNewToken(rawTokenToToken(data.token)); + } + return data; + }, + [setNewToken] + ); + + const revokeToken = useCallback( + async (id: string) => { + HTTP.delete(`/token/${id}`).then(() => { + setTokens([...tokens.filter((token) => token.id !== id)]); + if (newToken?.id === id) { + setNewToken(null); + } + }); + }, + [setTokens, tokens, newToken, setNewToken] + ); + + useEffect(() => { + if (!githubUser) { + return; + } + + HTTP.get(`/tokens`).then(({data}) => { + setTokens([ + ...data.tokens + .filter((token) => token.id !== newToken?.id) + .map(rawTokenToToken), + ]); + }); + }, [setTokens, githubUser, newToken]); + + return { newToken, tokens, createToken, revokeToken }; +} diff --git a/app/src/features/toolbar/components/UserButton.tsx b/app/src/features/toolbar/components/UserButton.tsx index 42b650f..0a03291 100644 --- a/app/src/features/toolbar/components/UserButton.tsx +++ b/app/src/features/toolbar/components/UserButton.tsx @@ -6,7 +6,6 @@ import Menu from '@mui/material/Menu/Menu'; import MenuItem from '@mui/material/MenuItem/MenuItem'; import { useNavigate } from 'react-router-dom'; import { useGithubAuth } from '../hooks/useGithubAuth'; -import { useLocalSession } from '../../../utils/localStorage'; import ArrowDropDownIcon from '@mui/icons-material/ArrowDropDown'; import { REDIRECT_URI } from '../../../constants'; @@ -18,7 +17,6 @@ const StyledWrapper = styled.div` `; function UserButton() { - const { clearSessionId, sessionId } = useLocalSession(); const navigate = useNavigate(); const [user, logout] = useGithubAuth(); const [anchorEl, setAnchorEl] = React.useState(null); @@ -43,12 +41,11 @@ function UserButton() { ); const handleLogout = useCallback(() => { - clearSessionId(); logout(); handleNavigate('/'); - }, [handleNavigate, logout, clearSessionId]); + }, [handleNavigate, logout]); - if (user && sessionId) { + if (!!user) { return ( + + + ); + } + + return ( +
+

{'API Tokens'}

+
+

API Tokens

+ + +
+ +
+ {newToken && ( + { + await revokeToken(newToken.id); + }} + /> + )} + {tokens.map((token) => ( + revokeToken(token.id)} + /> + ))} + {!tokens.length && !newToken && ( +
+ {`You haven't generated any API tokens yet.`} +
+ )} +
+
+ ); +} + +export default ApiTokens; diff --git a/app/src/utils/http.ts b/app/src/utils/http.ts new file mode 100644 index 0000000..16d3056 --- /dev/null +++ b/app/src/utils/http.ts @@ -0,0 +1,94 @@ +import type { TypedAxios } from 'typed-axios-instance'; +import axios from 'axios'; +import { SERVER_URI } from '../constants'; + +export interface AuthenticatedUser { + fullName: string; + email?: string; + githubUrl: string; + githubLogin: string; + isAdmin: boolean; + avatarUrl?: string; +} + +export interface LoginRequest { + code: string; + } + +export interface LoginResponse { + sessionId: string; + user: AuthenticatedUser; +} + +export interface UserResponse { + user: AuthenticatedUser; +} + + export interface RawToken { + id: string, + name: string, + token?: string, + createdAt: Date, + } + + export interface CreateTokenRequest { + name: string; + } + export interface CreateTokenResponse { + token?: RawToken; + error?: string; + } + + export interface TokensResponse { + tokens: RawToken[]; + error?: string; + } + +type Routes = [ + { + route: '/user'; + method: 'GET'; + jsonResponse: UserResponse; + }, + { + route: '/login'; + method: 'POST'; + jsonBody: LoginRequest; + jsonResponse: LoginResponse; + }, + { + route: '/logout'; + method: 'POST'; + }, + { + route: '/new_token'; + method: 'POST'; + jsonBody: CreateTokenRequest; + jsonResponse: CreateTokenResponse; + }, + { + route: '/tokens'; + method: 'GET'; + jsonResponse: TokensResponse; + }, + { + route: '/token/[id]'; + method: 'DELETE'; + } +]; + +const HTTP: TypedAxios = axios.create({ + withCredentials: true, + baseURL: SERVER_URI, +}); + +// Intercept the response and log any errors. +HTTP.interceptors.response.use(function (response) { + // Any status code that lie within the range of 2xx cause this function to trigger + return response; + }, function (error) { + // Any status codes that falls outside the range of 2xx cause this function to trigger + console.error('HTTP Error:', error); + return Promise.reject(error); + }); +export default HTTP; diff --git a/app/src/utils/localStorage.ts b/app/src/utils/localStorage.ts index 2e3b72f..b503a3a 100644 --- a/app/src/utils/localStorage.ts +++ b/app/src/utils/localStorage.ts @@ -1,7 +1,6 @@ import { useLocalStorage } from "usehooks-ts"; const STORAGE_GH_CODE_KEY = 'gh_code'; -const STORAGE_FP_SESSION_KEY = 'fp_session'; export function useLocalSession() { function clear(key: string, handleSave: (value: T | null) => void) { @@ -12,8 +11,5 @@ export function useLocalSession() { const [githubCode, saveGithubCode] = useLocalStorage(STORAGE_GH_CODE_KEY, null); const clearGithubCode = () => clear(STORAGE_GH_CODE_KEY, saveGithubCode); - const [sessionId, saveSessionId] = useLocalStorage(STORAGE_FP_SESSION_KEY, null); - const clearSessionId = () => clear(STORAGE_FP_SESSION_KEY, saveSessionId); - - return {githubCode, saveGithubCode, clearGithubCode, sessionId, saveSessionId, clearSessionId }; + return {githubCode, saveGithubCode, clearGithubCode }; } \ No newline at end of file diff --git a/migrations/2024-04-29-210717_create_api_tokens/down.sql b/migrations/2024-04-29-210717_create_api_tokens/down.sql new file mode 100644 index 0000000..fe77ebc --- /dev/null +++ b/migrations/2024-04-29-210717_create_api_tokens/down.sql @@ -0,0 +1 @@ +DROP TABLE api_tokens diff --git a/migrations/2024-04-29-210717_create_api_tokens/up.sql b/migrations/2024-04-29-210717_create_api_tokens/up.sql new file mode 100644 index 0000000..f94e55d --- /dev/null +++ 
b/migrations/2024-04-29-210717_create_api_tokens/up.sql
@@ -0,0 +1,8 @@
+CREATE TABLE api_tokens (
+  id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
+  user_id uuid NOT NULL REFERENCES users(id),
+  friendly_name VARCHAR NOT NULL,
+  token BYTEA NOT NULL,
+  expires_at TIMESTAMP,
+  created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
+)
\ No newline at end of file
diff --git a/src/api/api_token.rs b/src/api/api_token.rs
new file mode 100644
index 0000000..3ed5e0f
--- /dev/null
+++ b/src/api/api_token.rs
@@ -0,0 +1,42 @@
+use crate::{models, util::sys_time_to_epoch};
+use rocket::serde::{Deserialize, Serialize};
+
+#[derive(Serialize, Deserialize, Debug, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct Token {
+    pub id: String,
+    pub name: String,
+    pub created_at: u64,
+    pub token: Option<String>,
+}
+
+impl From<models::ApiToken> for Token {
+    fn from(token: models::ApiToken) -> Self {
+        Token {
+            id: token.id.to_string(),
+            name: token.friendly_name,
+            created_at: sys_time_to_epoch(token.created_at),
+            // We don't return the hashed token, as it's a secret.
+            token: None,
+        }
+    }
+}
+/// The CreateToken request.
+#[derive(Deserialize, Debug)]
+pub struct CreateTokenRequest {
+    pub name: String,
+}
+
+/// The response to a CreateToken request.
+#[derive(Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CreateTokenResponse {
+    pub token: Token,
+}
+
+/// The response to a Tokens request.
+#[derive(Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct TokensResponse {
+    pub tokens: Vec<Token>,
+}
diff --git a/src/api.rs b/src/api/auth.rs
similarity index 59%
rename from src/api.rs
rename to src/api/auth.rs
index 3448dba..66e3579 100644
--- a/src/api.rs
+++ b/src/api/auth.rs
@@ -36,32 +36,13 @@ pub struct LoginRequest {
 #[derive(Serialize)]
 #[serde(rename_all = "camelCase")]
 pub struct LoginResponse {
-    pub user: Option<User>,
-    pub session_id: Option<String>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub error: Option<String>,
+    pub user: User,
+    pub session_id: String,
 }
 
-/// The response to a session request.
+/// The response to a user GET request.
 #[derive(Serialize)]
 #[serde(rename_all = "camelCase")]
-pub struct SessionResponse {
-    pub user: Option<User>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub error: Option<String>,
-}
-
-/// The publish request.
-#[derive(Deserialize, Debug)]
-pub struct PublishRequest {
-    pub name: String,
-    pub version: String,
-}
-
-/// The response to a publish request.
-#[derive(Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct PublishResponse {
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub error: Option<String>,
+pub struct UserResponse {
+    pub user: User,
 }
diff --git a/src/api/mod.rs b/src/api/mod.rs
new file mode 100644
index 0000000..cae4c6a
--- /dev/null
+++ b/src/api/mod.rs
@@ -0,0 +1,35 @@
+pub mod api_token;
+pub mod auth;
+pub mod publish;
+
+use rocket::{
+    http::Status,
+    response::Responder,
+    serde::{json::Json, Serialize},
+    Request,
+};
+use thiserror::Error;
+
+/// A wrapper for API responses that can return errors.
+pub type ApiResult<T> = Result<Json<T>, ApiError>;
+
+/// An empty response.
+#[derive(Serialize)] +pub struct EmptyResponse; + +#[derive(Error, Debug)] +pub enum ApiError { + #[error("Database error: {0}")] + Database(#[from] crate::db::error::DatabaseError), + #[error("GitHub error: {0}")] + Github(#[from] crate::github::GithubError), +} + +impl<'r, 'o: 'r> Responder<'r, 'o> for ApiError { + fn respond_to(self, _request: &'r Request<'_>) -> rocket::response::Result<'o> { + match self { + ApiError::Database(_) => Err(Status::InternalServerError), + ApiError::Github(_) => Err(Status::Unauthorized), + } + } +} diff --git a/src/api/publish.rs b/src/api/publish.rs new file mode 100644 index 0000000..336b585 --- /dev/null +++ b/src/api/publish.rs @@ -0,0 +1,8 @@ +use rocket::serde::Deserialize; + +/// The publish request. +#[derive(Deserialize, Debug)] +pub struct PublishRequest { + pub name: String, + pub version: String, +} diff --git a/src/cors.rs b/src/cors.rs deleted file mode 100644 index 6e99426..0000000 --- a/src/cors.rs +++ /dev/null @@ -1,28 +0,0 @@ -use rocket::fairing::{Fairing, Info, Kind}; -use rocket::http::Header; -use rocket::{Request, Response}; - -// Build an open cors module so this server can be used accross many locations on the web. -pub struct Cors; - -// Build Cors Fairing. -#[rocket::async_trait] -impl Fairing for Cors { - fn info(&self) -> Info { - Info { - name: "Cross-Origin-Resource-Sharing Fairing", - kind: Kind::Response, - } - } - - // Build an Access-Control-Allow-Origin * policy Response header. - async fn on_response<'r>(&self, _request: &'r Request<'_>, response: &mut Response<'r>) { - response.set_header(Header::new("Access-Control-Allow-Origin", "*")); - response.set_header(Header::new( - "Access-Control-Allow-Methods", - "POST, PATCH, PUT, DELETE, HEAD, OPTIONS, GET", - )); - response.set_header(Header::new("Access-Control-Allow-Headers", "*")); - response.set_header(Header::new("Access-Control-Allow-Credentials", "true")); - } -} diff --git a/src/db/api_token.rs b/src/db/api_token.rs new file mode 100644 index 0000000..3a04ac9 --- /dev/null +++ b/src/db/api_token.rs @@ -0,0 +1,138 @@ +use super::error::DatabaseError; +use super::string_to_uuid; +use super::{models, schema, DbConn}; +use diesel::prelude::*; +use rand::{distributions::Uniform, rngs::OsRng, Rng}; +use sha2::{Digest, Sha256}; +use uuid::Uuid; + +/// NEVER CHANGE THE PREFIX OF EXISTING TOKENS!!! Doing so will implicitly +/// revoke all the tokens, disrupting production users. +const TOKEN_PREFIX: &str = "pub_"; +const TOKEN_LENGTH: usize = 32; + +/// A plain-text API token. +#[derive(Debug)] +pub struct PlainToken(String); + +impl Default for PlainToken { + fn default() -> Self { + Self::new() + } +} + +impl PlainToken { + pub fn hash(&self) -> Vec { + Sha256::digest(self.0.as_bytes()).as_slice().to_vec() + } + + pub fn new() -> Self { + const CHARS: &[u8] = b"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"; + + let secure_alphanumeric_string = OsRng + .sample_iter(Uniform::from(0..CHARS.len())) + .map(|idx| CHARS[idx] as char) + .take(TOKEN_LENGTH) + .collect::(); + + Self(format!("{}{}", TOKEN_PREFIX, secure_alphanumeric_string)) + } +} + +impl From for PlainToken { + fn from(s: String) -> Self { + Self(s) + } +} + +impl From for String { + fn from(val: PlainToken) -> Self { + val.0 + } +} + +impl DbConn { + /// Creates an API token for the user and returns the token. 
+ pub fn new_token( + &mut self, + user_id: Uuid, + friendly_name: String, + ) -> Result<(models::ApiToken, PlainToken), DatabaseError> { + let plain_token = PlainToken::new(); + let token = plain_token.hash(); + + let new_token = models::NewApiToken { + user_id, + friendly_name, + token, + expires_at: None, + }; + + // Insert new session + let saved_token = diesel::insert_into(schema::api_tokens::table) + .values(&new_token) + .returning(models::ApiToken::as_returning()) + .get_result(self.inner()) + .map_err(|_| DatabaseError::InsertTokenFailed(user_id.to_string()))?; + + Ok((saved_token, plain_token)) + } + + /// Deletes an API token for the user. + pub fn delete_token(&mut self, user_id: Uuid, token_id: String) -> Result<(), DatabaseError> { + let token_uuid = string_to_uuid(token_id.clone())?; + + diesel::delete( + schema::api_tokens::table + .filter(schema::api_tokens::id.eq(token_uuid)) + .filter(schema::api_tokens::user_id.eq(user_id)), + ) + .execute(self.inner()) + .map_err(|_| DatabaseError::NotFound(token_id))?; + + Ok(()) + } + + /// Fetch all tokens for the given user ID. + pub fn get_tokens_for_user( + &mut self, + user_id: Uuid, + ) -> Result, DatabaseError> { + schema::api_tokens::table + .filter(schema::api_tokens::user_id.eq(user_id)) + .select(models::ApiToken::as_returning()) + .load(self.inner()) + .map_err(|_| DatabaseError::NotFound(user_id.to_string())) + } + + /// Fetch an API token given the plaintext token. + pub fn get_token( + &mut self, + plain_token: PlainToken, + ) -> Result { + let hashed = plain_token.hash(); + schema::api_tokens::table + .filter(schema::api_tokens::token.eq(hashed)) + .select(models::ApiToken::as_returning()) + .first::(self.inner()) + .map_err(|_| DatabaseError::NotFound("API Token".to_string())) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_plain_token_new() { + let token = PlainToken::new(); + assert!(token.0.starts_with(TOKEN_PREFIX)); + assert_eq!(token.hash(), Sha256::digest(token.0.as_bytes()).as_slice()); + } + + #[test] + fn test_plain_token_from() { + let token = PlainToken::from("123456".to_string()); + assert_eq!(token.hash(), Sha256::digest(token.0.as_bytes()).as_slice()); + } +} diff --git a/src/db/error.rs b/src/db/error.rs index 097ecbb..7f2c3a5 100644 --- a/src/db/error.rs +++ b/src/db/error.rs @@ -8,6 +8,8 @@ pub enum DatabaseError { NotFound(String), #[error("Failed to save user: {0}")] InsertUserFailed(String), - #[error("Failed to save session for user; {0}")] + #[error("Failed to save session for user: {0}")] InsertSessionFailed(String), + #[error("Failed to save token for user: {0}")] + InsertTokenFailed(String), } diff --git a/src/db/mod.rs b/src/db/mod.rs index 3014798..6824487 100644 --- a/src/db/mod.rs +++ b/src/db/mod.rs @@ -1,4 +1,5 @@ -mod error; +pub mod api_token; +pub mod error; mod user_session; use self::error::DatabaseError; @@ -24,6 +25,13 @@ impl Default for Database { } } +pub struct DbConn(DbConnection); +impl DbConn { + pub fn inner(&mut self) -> &mut PgConnection { + &mut self.0 + } +} + impl Database { pub fn new() -> Self { // Create a connection pool @@ -43,8 +51,8 @@ impl Database { } /// Get a connection from the pool. 
-    pub fn connection(&self) -> DbConnection {
-        self.pool.get().expect("db connection")
+    pub fn conn(&self) -> DbConn {
+        DbConn(self.pool.get().expect("db connection"))
     }
 }
diff --git a/src/db/user_session.rs b/src/db/user_session.rs
index 30d826d..318eec9 100644
--- a/src/db/user_session.rs
+++ b/src/db/user_session.rs
@@ -1,21 +1,19 @@
 use super::error::DatabaseError;
-use super::{api, models, schema};
-use super::{string_to_uuid, Database};
+use super::{api, models, schema, DbConn};
 use diesel::prelude::*;
 use diesel::upsert::excluded;
 use std::time::{Duration, SystemTime};
+use uuid::Uuid;
 
-impl Database {
+impl DbConn {
     /// Insert a user session into the database and return the session ID.
     /// If the user doesn't exist, insert the user as well.
     /// If the user does exist, update the user's full name and avatar URL if they have changed.
     pub fn insert_user_session(
-        &self,
-        user: &api::User,
+        &mut self,
+        user: &api::auth::User,
         expires_in: u32,
-    ) -> Result<String, DatabaseError> {
-        let connection = &mut self.connection();
-
+    ) -> Result<models::Session, DatabaseError> {
         // Insert or update a user
         let new_user = models::NewUser {
             full_name: user.full_name.clone(),
@@ -35,7 +33,7 @@
                 schema::users::full_name.eq(excluded(schema::users::full_name)),
                 schema::users::avatar_url.eq(excluded(schema::users::avatar_url)),
             ))
-            .get_result(connection)
+            .get_result(self.inner())
             .map_err(|_| DatabaseError::InsertUserFailed(user.github_login.clone()))?;
 
         let new_session = models::NewSession {
@@ -47,21 +45,48 @@
         let saved_session = diesel::insert_into(schema::sessions::table)
            .values(&new_session)
            .returning(models::Session::as_returning())
-            .get_result(connection)
+            .get_result(self.inner())
            .map_err(|_| DatabaseError::InsertSessionFailed(user.github_login.clone()))?;
 
-        Ok(saved_session.id.to_string())
+        Ok(saved_session)
+    }
+
+    /// Fetch a user given the user ID.
+    pub fn get_user(&mut self, user_id: Uuid) -> Result<models::User, DatabaseError> {
+        schema::users::table
+            .filter(schema::users::id.eq(user_id))
+            .select(models::User::as_returning())
+            .first::<models::User>(self.inner())
+            .map_err(|_| DatabaseError::NotFound(user_id.to_string()))
+    }
+
+    /// Fetch a session given the session ID.
+    pub fn get_session(&mut self, session_id: Uuid) -> Result<models::Session, DatabaseError> {
+        schema::sessions::table
+            .filter(schema::sessions::id.eq(session_id))
+            .select(models::Session::as_returning())
+            .first::<models::Session>(self.inner())
+            .map_err(|_| DatabaseError::NotFound(session_id.to_string()))
     }
 
     /// Fetch a user from the database for a given session ID.
-    pub fn get_user_for_session(&self, session_id: String) -> Result<models::User, DatabaseError> {
-        let session_uuid = string_to_uuid(session_id.clone())?;
-        let connection = &mut self.connection();
+    pub fn get_user_for_session(
+        &mut self,
+        session_id: Uuid,
+    ) -> Result<models::User, DatabaseError> {
         schema::sessions::table
             .inner_join(schema::users::table)
-            .filter(schema::sessions::id.eq(session_uuid))
+            .filter(schema::sessions::id.eq(session_id))
             .select(models::User::as_returning())
-            .first::<models::User>(connection)
-            .map_err(|_| DatabaseError::NotFound(session_id))
+            .first::<models::User>(self.inner())
+            .map_err(|_| DatabaseError::NotFound(session_id.to_string()))
+    }
+
+    /// Delete a session given its ID.
+ pub fn delete_session(&mut self, session_id: Uuid) -> Result<(), DatabaseError> { + diesel::delete(schema::sessions::table.filter(schema::sessions::id.eq(session_id))) + .execute(self.inner()) + .map_err(|_| DatabaseError::NotFound(session_id.to_string()))?; + Ok(()) } } diff --git a/src/github.rs b/src/github.rs index 8da941e..85a8361 100644 --- a/src/github.rs +++ b/src/github.rs @@ -1,6 +1,6 @@ extern crate reqwest; -use crate::api::User; +use crate::api::auth::User; use serde::Deserialize; use std::env; use thiserror::Error; diff --git a/src/lib.rs b/src/lib.rs index 537aaa1..c633861 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,6 +1,7 @@ pub mod api; -pub mod cors; pub mod db; pub mod github; +pub mod middleware; pub mod models; pub mod schema; +pub mod util; diff --git a/src/main.rs b/src/main.rs index f61b992..8603d02 100644 --- a/src/main.rs +++ b/src/main.rs @@ -3,63 +3,96 @@ #[macro_use] extern crate rocket; +use forc_pub::api::api_token::{CreateTokenRequest, CreateTokenResponse, Token, TokensResponse}; +use forc_pub::api::publish::PublishRequest; use forc_pub::api::{ - LoginRequest, LoginResponse, PublishRequest, PublishResponse, SessionResponse, User, + auth::{LoginRequest, LoginResponse, UserResponse}, + ApiResult, EmptyResponse, }; -use forc_pub::cors::Cors; use forc_pub::db::Database; use forc_pub::github::handle_login; +use forc_pub::middleware::cors::Cors; +use forc_pub::middleware::session_auth::{SessionAuth, SESSION_COOKIE_NAME}; +use forc_pub::middleware::token_auth::TokenAuth; +use rocket::http::{Cookie, CookieJar}; use rocket::{serde::json::Json, State}; #[derive(Default)] -struct ServerState { +pub struct ServerState { pub db: Database, } /// The endpoint to authenticate with GitHub. #[post("/login", data = "")] -async fn login(state: &State, request: Json) -> Json { - match handle_login(request.code.clone()).await { - Ok((user, expires_in)) => match state.db.insert_user_session(&user, expires_in) { - Ok(session_id) => Json(LoginResponse { - user: Some(user), - session_id: Some(session_id), - error: None, - }), - Err(e) => Json(LoginResponse { - user: None, - session_id: None, - error: Some(e.to_string()), - }), - }, - Err(e) => Json(LoginResponse { - user: None, - session_id: None, - error: Some(e.to_string()), - }), - } +async fn login( + db: &State, + cookies: &CookieJar<'_>, + request: Json, +) -> ApiResult { + let (user, expires_in) = handle_login(request.code.clone()).await?; + let session = db.conn().insert_user_session(&user, expires_in)?; + let session_id = session.id.to_string(); + cookies.add(Cookie::build(SESSION_COOKIE_NAME, session_id.clone()).finish()); + Ok(Json(LoginResponse { user, session_id })) +} + +/// The endpoint to log out. +#[post("/logout")] +async fn logout(db: &State, auth: SessionAuth) -> ApiResult { + let session_id = auth.session_id; + let _ = db.conn().delete_session(session_id)?; + Ok(Json(EmptyResponse)) } /// The endpoint to authenticate with GitHub. 
-#[get("/session?")] -async fn session(state: &State, id: String) -> Json { - match state.db.get_user_for_session(id) { - Ok(user) => Json(SessionResponse { - user: Some(User::from(user)), - error: None, - }), - Err(error) => Json(SessionResponse { - user: None, - error: Some(error.to_string()), - }), - } +#[get("/user")] +fn user(auth: SessionAuth) -> Json { + Json(UserResponse { + user: auth.user.into(), + }) +} + +#[post("/new_token", data = "")] +fn new_token( + db: &State, + auth: SessionAuth, + request: Json, +) -> ApiResult { + let user = auth.user; + let (token, plain_token) = db.conn().new_token(user.id, request.name.clone())?; + Ok(Json(CreateTokenResponse { + token: Token { + // The only time we return the plain token is when it's created. + token: Some(plain_token.into()), + ..token.into() + }, + })) +} + +#[delete("/token/")] +fn delete_token(db: &State, auth: SessionAuth, id: String) -> ApiResult { + let user_id = auth.user.id; + let _ = db.conn().delete_token(user_id, id.clone())?; + Ok(Json(EmptyResponse)) +} + +#[get("/tokens")] +fn tokens(db: &State, auth: SessionAuth) -> ApiResult { + let user_id = auth.user.id; + let tokens = db.conn().get_tokens_for_user(user_id)?; + Ok(Json(TokensResponse { + tokens: tokens.into_iter().map(|t| t.into()).collect(), + })) } -/// The endpoint to publish a package version. #[post("/publish", data = "")] -fn publish(request: Json) -> Json { - eprintln!("Received request: {:?}", request); - Json(PublishResponse { error: None }) +fn publish(request: Json, auth: TokenAuth) -> ApiResult { + println!( + "Publishing: {:?} for token: {:?}", + request, auth.token.friendly_name + ); + + Ok(Json(EmptyResponse)) } /// Catches all OPTION requests in order to get the CORS related Fairing triggered. @@ -84,8 +117,21 @@ fn health() -> String { #[launch] fn rocket() -> _ { rocket::build() - .manage(ServerState::default()) + .manage(Database::default()) .attach(Cors) - .mount("/", routes![login, session, publish, all_options, health]) + .mount( + "/", + routes![ + login, + logout, + user, + new_token, + delete_token, + publish, + tokens, + all_options, + health + ], + ) .register("/", catchers![not_found]) } diff --git a/src/middleware/cors.rs b/src/middleware/cors.rs new file mode 100644 index 0000000..7e7c99b --- /dev/null +++ b/src/middleware/cors.rs @@ -0,0 +1,100 @@ +use dotenvy::dotenv; +use regex::Regex; +use reqwest::header::ACCESS_CONTROL_ALLOW_ORIGIN; +use rocket::fairing::{Fairing, Info, Kind}; +use rocket::http::hyper::header; +use rocket::http::{Header, HeaderMap}; +use rocket::{Request, Response}; +use std::env; + +// Build an open cors module so this server can be used accross many locations on the web. +pub struct Cors; + +fn get_allowed_origin(headers: &HeaderMap<'_>) -> Option { + dotenv().ok(); + + if let Some(req_origin) = headers.get_one(header::ORIGIN.as_str()) { + // If the environment variable CORS_HTTP_ORIGIN is set, only allow that origin. + if let Ok(env_origin) = env::var("CORS_HTTP_ORIGIN") { + if req_origin == env_origin.as_str() { + return Some(env_origin); + } + } + + // If the request origin matches the allowed regex, allow only the request origin. + let re = Regex::new( + r"^https://forc(((.pub)|((-pub)(-git-[a-zA-Z0-9-]+-fuel-labs)?\.vercel\.app)))$", + ) + .unwrap(); + if re.is_match(req_origin) { + return Some(req_origin.to_string()); + } + } + None +} + +// Build Cors Fairing. 
+#[rocket::async_trait] +impl Fairing for Cors { + fn info(&self) -> Info { + Info { + name: "Cross-Origin-Resource-Sharing Fairing", + kind: Kind::Response, + } + } + + // Build an Access-Control-Allow-Origin policy Response header. + async fn on_response<'r>(&self, request: &'r Request<'_>, response: &mut Response<'r>) { + if let Some(origin) = get_allowed_origin(request.headers()) { + response.set_header(Header::new(ACCESS_CONTROL_ALLOW_ORIGIN.as_str(), origin)); + } + response.set_header(Header::new( + header::ACCESS_CONTROL_ALLOW_METHODS.as_str(), + "POST, PATCH, PUT, DELETE, HEAD, OPTIONS, GET", + )); + response.set_header(Header::new( + header::ACCESS_CONTROL_ALLOW_HEADERS.as_str(), + "*, Access-Control-Request-Headers, Content-Type", + )); + response.set_header(Header::new( + header::ACCESS_CONTROL_ALLOW_CREDENTIALS.as_str(), + "true", + )); + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_get_allowed_origin() { + let mut headers = HeaderMap::new(); + + let test_cases = [ + ("https://forc.pub", true), + ("https://forc-pub.vercel.app", true), + ("https://forc-pub-git-api-tokens-fuel-labs.vercel.app", true), + ("https://forc.pub/", false), + ("https://forc.pub/tokens", false), + ("https://forc.com.pub", false), + ("https://forc-spub.vercel.app", false), + ]; + + env::remove_var("CORS_HTTP_ORIGIN"); + test_cases.iter().for_each(|(origin, expected)| { + headers.add(Header::new(header::ORIGIN.as_str(), *origin)); + match expected { + true => assert_eq!(get_allowed_origin(&headers), Some(origin.to_string())), + false => assert!(get_allowed_origin(&headers).is_none()), + } + headers.remove(header::ORIGIN.as_str()); + }); + + // Test with CORS_HTTP_ORIGIN set. + let origin = "http://localhost:3000"; + env::set_var("CORS_HTTP_ORIGIN", origin); + headers.add(Header::new(header::ORIGIN.as_str(), origin)); + assert_eq!(get_allowed_origin(&headers), Some(origin.to_string())) + } +} diff --git a/src/middleware/mod.rs b/src/middleware/mod.rs new file mode 100644 index 0000000..ff75b40 --- /dev/null +++ b/src/middleware/mod.rs @@ -0,0 +1,3 @@ +pub mod cors; +pub mod session_auth; +pub mod token_auth; diff --git a/src/middleware/session_auth.rs b/src/middleware/session_auth.rs new file mode 100644 index 0000000..9caf84c --- /dev/null +++ b/src/middleware/session_auth.rs @@ -0,0 +1,56 @@ +use crate::db::Database; +use crate::models; +use rocket::http::Status; +use rocket::request::{FromRequest, Outcome}; +use rocket::Request; +use std::time::SystemTime; +use uuid::Uuid; + +pub const SESSION_COOKIE_NAME: &str = "fp_session"; + +pub struct SessionAuth { + pub user: models::User, + pub session_id: Uuid, +} + +#[derive(Debug)] +pub enum SessionAuthError { + Missing, + Invalid, + DatabaseConnection, +} + +#[rocket::async_trait] +impl<'r> FromRequest<'r> for SessionAuth { + type Error = SessionAuthError; + + async fn from_request(request: &'r Request<'_>) -> Outcome { + // TODO: use fairing for db connection? 
+ // let db = try_outcome!(request.guard::().await); + + let mut db = match request.rocket().state::() { + Some(db) => db.conn(), + None => { + return Outcome::Failure(( + Status::InternalServerError, + SessionAuthError::DatabaseConnection, + )) + } + }; + if let Some(Some(session_id)) = request + .cookies() + .get(SESSION_COOKIE_NAME) + .map(|c| Uuid::parse_str(c.value()).ok()) + { + if let Ok(session) = db.get_session(session_id) { + if let Ok(user) = db.get_user_for_session(session_id) { + if session.expires_at > SystemTime::now() { + return Outcome::Success(SessionAuth { user, session_id }); + } + } + } + return Outcome::Failure((Status::Unauthorized, SessionAuthError::Invalid)); + } + return Outcome::Failure((Status::Unauthorized, SessionAuthError::Missing)); + } +} diff --git a/src/middleware/token_auth.rs b/src/middleware/token_auth.rs new file mode 100644 index 0000000..a15654b --- /dev/null +++ b/src/middleware/token_auth.rs @@ -0,0 +1,49 @@ +use crate::db::api_token::PlainToken; +use crate::db::Database; +use crate::models; +use rocket::http::hyper::header; +use rocket::http::Status; +use rocket::request::{FromRequest, Outcome}; +use rocket::Request; + +pub struct TokenAuth { + pub token: models::ApiToken, +} + +#[derive(Debug)] +pub enum TokenAuthError { + Missing, + Invalid, + DatabaseConnection, +} + +#[rocket::async_trait] +impl<'r> FromRequest<'r> for TokenAuth { + type Error = TokenAuthError; + + async fn from_request(request: &'r Request<'_>) -> Outcome { + // TODO: use fairing for db connection? + // let db = try_outcome!(request.guard::().await); + + let mut db = match request.rocket().state::() { + Some(db) => db.conn(), + None => { + return Outcome::Failure(( + Status::InternalServerError, + TokenAuthError::DatabaseConnection, + )) + } + }; + + if let Some(auth_header) = request.headers().get_one(header::AUTHORIZATION.as_str()) { + if auth_header.starts_with("Bearer ") { + let token = auth_header.trim_start_matches("Bearer "); + if let Ok(token) = db.get_token(PlainToken::from(token.to_string())) { + return Outcome::Success(TokenAuth { token }); + } + } + return Outcome::Failure((Status::Unauthorized, TokenAuthError::Invalid)); + } + return Outcome::Failure((Status::Unauthorized, TokenAuthError::Missing)); + } +} diff --git a/src/models.rs b/src/models.rs index 93c2096..d6cbdd6 100644 --- a/src/models.rs +++ b/src/models.rs @@ -2,7 +2,7 @@ use diesel::prelude::*; use std::time::SystemTime; use uuid::Uuid; -#[derive(Queryable, Selectable, Debug)] +#[derive(Queryable, Selectable, Debug, Clone)] #[diesel(table_name = crate::schema::users)] #[diesel(check_for_backend(diesel::pg::Pg))] pub struct User { @@ -43,3 +43,23 @@ pub struct NewSession { pub user_id: Uuid, pub expires_at: SystemTime, } + +#[derive(Queryable, Selectable, Debug, PartialEq, Eq)] +#[diesel(table_name = crate::schema::api_tokens)] +#[diesel(check_for_backend(diesel::pg::Pg))] +pub struct ApiToken { + pub id: Uuid, + pub user_id: Uuid, + pub friendly_name: String, + pub expires_at: Option, + pub created_at: SystemTime, +} + +#[derive(Insertable)] +#[diesel(table_name = crate::schema::api_tokens)] +pub struct NewApiToken { + pub user_id: Uuid, + pub friendly_name: String, + pub token: Vec, + pub expires_at: Option, +} diff --git a/src/schema.rs b/src/schema.rs index 9f19130..6152c56 100644 --- a/src/schema.rs +++ b/src/schema.rs @@ -1,5 +1,16 @@ // @generated automatically by Diesel CLI. +diesel::table! 
{ + api_tokens (id) { + id -> Uuid, + user_id -> Uuid, + friendly_name -> Varchar, + token -> Bytea, + expires_at -> Nullable, + created_at -> Timestamp, + } +} + diesel::table! { sessions (id) { id -> Uuid, @@ -22,6 +33,7 @@ diesel::table! { } } +diesel::joinable!(api_tokens -> users (user_id)); diesel::joinable!(sessions -> users (user_id)); -diesel::allow_tables_to_appear_in_same_query!(sessions, users,); +diesel::allow_tables_to_appear_in_same_query!(api_tokens, sessions, users,); diff --git a/src/util.rs b/src/util.rs new file mode 100644 index 0000000..549d5b7 --- /dev/null +++ b/src/util.rs @@ -0,0 +1,9 @@ +use std::time::SystemTime; + +pub fn sys_time_to_epoch(sys_time: SystemTime) -> u64 { + sys_time + .duration_since(SystemTime::UNIX_EPOCH) + .expect("convert time to epoch") + .as_secs() + * 1000 +} diff --git a/tests/db_integration.rs b/tests/db_integration.rs index 61af983..0e47330 100644 --- a/tests/db_integration.rs +++ b/tests/db_integration.rs @@ -1,7 +1,7 @@ use diesel::RunQueryDsl as _; use forc_pub::api; -use forc_pub::db::Database; -use uuid::Uuid; +use forc_pub::db::{Database, DbConn}; +use serial_test::serial; /// Note: Integration tests for the database module assume that the database is running and that the DATABASE_URL environment variable is set. /// This should be done by running `./scripts/start_local_db.sh` before running the tests. @@ -12,19 +12,23 @@ const TEST_EMAIL_1: &str = "alice@bob.com"; const TEST_URL_1: &str = "url1.url"; const TEST_URL_2: &str = "url2.url"; const TEST_LOGIN_2: &str = "foobar"; +const TEST_TOKEN_NAME_1: &str = "test token 1"; +const TEST_TOKEN_NAME_2: &str = "test token 2"; -fn clear_tables(db: &Database) { - let connection = &mut db.connection(); +fn clear_tables(db: &mut DbConn) { + diesel::delete(forc_pub::schema::api_tokens::table) + .execute(db.inner()) + .expect("clear api_tokens table"); diesel::delete(forc_pub::schema::sessions::table) - .execute(connection) + .execute(db.inner()) .expect("clear sessions table"); diesel::delete(forc_pub::schema::users::table) - .execute(connection) + .execute(db.inner()) .expect("clear users table"); } -fn mock_user_1() -> api::User { - api::User { +fn mock_user_1() -> api::auth::User { + api::auth::User { github_login: TEST_LOGIN_1.to_string(), full_name: TEST_FULL_NAME_1.to_string(), email: Some(TEST_EMAIL_1.to_string()), @@ -34,32 +38,30 @@ fn mock_user_1() -> api::User { } } -fn mock_user_2() -> api::User { - api::User { +fn mock_user_2() -> api::auth::User { + api::auth::User { github_login: TEST_LOGIN_2.to_string(), ..Default::default() } } +#[serial] #[test] -fn test_multiple_user_sessions() { - let db = Database::default(); +fn test_user_sessions() { + let db = &mut Database::default().conn(); let user1 = mock_user_1(); let user2 = mock_user_2(); let session1 = db.insert_user_session(&user1, 1000).expect("result is ok"); - Uuid::parse_str(session1.as_str()).expect("result is a valid UUID"); // Insert an existing user let session2 = db.insert_user_session(&user1, 1000).expect("result is ok"); - Uuid::parse_str(session2.as_str()).expect("result is a valid UUID"); // Insert another user let session3 = db.insert_user_session(&user2, 1000).expect("result is ok"); - Uuid::parse_str(session3.as_str()).expect("result is a valid UUID"); - let result = db.get_user_for_session(session1).expect("result is ok"); + let result = db.get_user_for_session(session1.id).expect("result is ok"); assert_eq!(result.github_login, TEST_LOGIN_1); assert_eq!(result.full_name, TEST_FULL_NAME_1); 
assert_eq!(result.email.expect("is some"), TEST_EMAIL_1); @@ -67,11 +69,55 @@ fn test_multiple_user_sessions() { assert_eq!(result.github_url, TEST_URL_2); assert!(result.is_admin); - let result = db.get_user_for_session(session2).expect("result is ok"); + let result = db.get_user_for_session(session2.id).expect("result is ok"); assert_eq!(result.github_login, TEST_LOGIN_1); - let result = db.get_user_for_session(session3).expect("result is ok"); + let result = db.get_user_for_session(session3.id).expect("result is ok"); assert_eq!(result.github_login, TEST_LOGIN_2); - clear_tables(&db); + clear_tables(db); +} + +#[test] +#[serial] +fn test_api_tokens() { + let db = &mut Database::default().conn(); + + let session = db + .insert_user_session(&mock_user_1(), 1000) + .expect("result is ok"); + let user = db.get_user_for_session(session.id).expect("result is ok"); + + // Insert tokens + let (token1, plain_token1) = db + .new_token(user.id, TEST_TOKEN_NAME_1.into()) + .expect("result is ok"); + let (token2, plain_token2) = db + .new_token(user.id, TEST_TOKEN_NAME_2.into()) + .expect("result is ok"); + + assert_eq!(token1.friendly_name, TEST_TOKEN_NAME_1); + assert_eq!(token1.expires_at, None); + assert_eq!(token2.friendly_name, TEST_TOKEN_NAME_2); + assert_eq!(token2.expires_at, None); + + // Test token hashing + assert_eq!(token1, db.get_token(plain_token1).expect("test token 1")); + assert_eq!(token2, db.get_token(plain_token2).expect("test token 2")); + + // Get tokens + let tokens = db.get_tokens_for_user(user.id).expect("result is ok"); + assert_eq!(tokens.len(), 2); + + // Delete tokens + db.delete_token(user.id, token1.id.into()) + .expect("result is ok"); + let tokens = db.get_tokens_for_user(user.id).expect("result is ok"); + assert_eq!(tokens.len(), 1); + db.delete_token(user.id, token2.id.into()) + .expect("result is ok"); + let tokens = db.get_tokens_for_user(user.id).expect("result is ok"); + assert_eq!(tokens.len(), 0); + + clear_tables(db); }